/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "except.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
36 #include "timevar.h"
37 #include "langhooks.h"
38 #include "ggc.h"
39 #include "toplev.h"
42 /* Nonzero if we are using EH to handle cleanups. */
43 static int using_eh_for_cleanups_p = 0;
45 void
46 using_eh_for_cleanups (void)
48 using_eh_for_cleanups_p = 1;
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
}


/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}

void
add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  gcc_assert (TREE_CODE (t) != RESX_EXPR);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
                                                    struct_ptr_eq,
                                                    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
  /* ??? For the benefit of calls.c, converting all this to rtl,
     we need to record the call expression, not just the outer
     modify statement.  */
  if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
      && (t = get_call_expr_in (t)))
    add_stmt_to_eh_region_fn (ifun, t, num);
}

void
add_stmt_to_eh_region (tree t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}

bool
remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                         NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      /* ??? For the benefit of calls.c, converting all this to rtl,
         we need to record the call expression, not just the outer
         modify statement.  */
      if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
          && (t = get_call_expr_in (t)))
        remove_stmt_from_eh_region_fn (ifun, t);
      return true;
    }
  else
    return false;
}

bool
remove_stmt_from_eh_region (tree t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}

int
lookup_stmt_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
                                            &n);

  return (p ? p->region_nr : -1);
}

int
lookup_stmt_eh_region (tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;
  return lookup_stmt_eh_region_fn (cfun, t);
}
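
/* Note the return conventions above: -2 means IFUN has no
   throw-statement table at all, -1 means the statement is not in the
   table, and any other value is the statement's EH region number.  */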
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
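
/* For example (an illustrative sketch): given

	try { L1:; try { L2:; } finally { L3:; } } finally { L4:; }

   L1 and the inner TRY_FINALLY_EXPR map to the outer TRY_FINALLY_EXPR,
   L2 maps to the inner one, L3 maps to the outer one again (a finally
   clause is walked with its *enclosing* region), and L4 maps to no
   region at all.  */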
static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}

/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */
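
/* In outline: lower_try_finally below picks one of four strategies for
   rewriting a TRY_FINALLY_EXPR, keyed on how many distinct destinations
   leave the finally block: drop the finally entirely when it is
   unreachable; splice it in directly when it cannot fall through or
   when there is exactly one destination (lower_try_finally_nofallthru,
   lower_try_finally_onedest); duplicate it per destination
   (lower_try_finally_copy); or emit one shared copy that ends in a
   dispatch switch (lower_try_finally_switch).  */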
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = (struct goto_queue_node *)
        bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
                 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
        {
          tsi_link_before (tsi, t, TSI_SAME_STMT);
          tsi_delink (tsi);
          return;
        }
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      gcc_unreachable ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}

/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}

/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */
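
/* For instance (an illustrative sketch): in

	try { ...; goto done; ... } finally { fin; }
	done:;

   the goto targets a label outside the TRY_FINALLY_EXPR, so it is
   queued here and later redirected so that FIN runs before control
   reaches "done".  */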
static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
        tree lab = GOTO_DESTINATION (stmt);

        /* Computed and non-local gotos do not get processed.  Given
           their nature we can neither tell whether we've escaped the
           finally block nor redirect them if we knew.  */
        if (TREE_CODE (lab) != LABEL_DECL)
          return;

        /* No need to record gotos that don't leave the try block.  */
        if (! outside_finally_tree (lab, tf->try_finally_expr))
          return;

        if (! tf->dest_array)
          {
            tf->dest_array = VEC_alloc (tree, heap, 10);
            VEC_quick_push (tree, tf->dest_array, lab);
            index = 0;
          }
        else
          {
            int n = VEC_length (tree, tf->dest_array);
            for (index = 0; index < n; ++index)
              if (VEC_index (tree, tf->dest_array, index) == lab)
                break;
            if (index == n)
              VEC_safe_push (tree, heap, tf->dest_array, lab);
          }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}

#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
                       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

		int x;
		int foo (void)
		{
		  x = 0;
		  try {
		    return x;
		  } finally {
		    x++;
		  }
		}

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
        {
        case RESULT_DECL:
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt;
          break;

        case GIMPLE_MODIFY_STMT:
          {
            tree result = GIMPLE_STMT_OPERAND (ret_expr, 0);
            tree new, old = GIMPLE_STMT_OPERAND (ret_expr, 1);

            if (!*return_value_p)
              {
                if (aggregate_value_p (TREE_TYPE (result),
                                       TREE_TYPE (current_function_decl)))
                  /* If this function returns in memory, copy the argument
                     into the return slot now.  Otherwise, we might need to
                     worry about magic return semantics, so we need to use a
                     temporary to hold the value until we're actually ready
                     to return.  */
                  new = result;
                else
                  new = create_tmp_var (TREE_TYPE (old), "rettmp");
                *return_value_p = new;
              }
            else
              new = *return_value_p;

            x = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (new), new, old);
            append_to_statement_list (x, &q->repl_stmt);

            if (new == result)
              x = result;
            else
              x = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (result), result, new);
            q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
          }
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* We want to transform
	try { body; } catch { stuff; }
   to
	body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
        over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build_resx (get_eh_region_number (tf->region));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */
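
/* Roughly (an illustrative sketch), when FIN cannot fall through,

	try { body; } finally { fin; }

   becomes

	body;
	lab:
	fin;

   with every escaping goto and return redirected to LAB; since FIN
   never falls through, no branch out of the block is needed.  */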
static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */
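
/* The lone destination may be the EH landing pad, the fallthru edge, a
   single return, or a single goto label; each case below splices the
   finally block in directly with no dispatching, roughly (sketch):

	body;
	finally_label:		(only when redirecting gotos/returns)
	fin;
	goto dest;		(or a RESX, or a plain fallthru)  */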
static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build_resx (get_eh_region_number (tf->region));

      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
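
/* Sketch of the result (illustrative):

	body;
	fin; goto fallthru_label;	(fallthru copy)
	eh_label: fin; resx;		(exception copy)
	lab1: fin; goto dest1;		(one copy per goto/return dest)
	...

   Each destination gets a private copy of FIN and a direct branch.  */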
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label = create_artificial_label ();

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);

          x = build1 (LABEL_EXPR, void_type_node, lab);
          append_to_statement_list (x, &new_stmt);

          x = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, &x);
          append_to_statement_list (x, &new_stmt);

          append_to_statement_list (q->cont_stmt, &new_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
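
/* Sketch of the result (illustrative):

	body;
	finally_tmp = fallthru_index; goto finally_label;
	eh_label: finally_tmp = eh_index;
	finally_label:
	fin;
	switch (finally_tmp)
	  {
	  case fallthru_index: goto fallthru_label;
	  case eh_index: resx;
	  default: goto dest1;		(one case per goto/return dest)
	  }

   FIN is emitted once and a small dispatcher selects the exit.  */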
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
                        NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, finally_tmp,
                  build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
        {
          x = build1 (GOTO_EXPR, void_type_node, finally_label);
          append_to_statement_list (x, tf->top_p);
        }

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, fallthru_index), NULL,
                          create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, finally_tmp,
                  build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, eh_index), NULL,
                          create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
        {
          mod = build2 (GIMPLE_MODIFY_STMT, void_type_node, finally_tmp,
                        build_int_cst (NULL_TREE, return_index));
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          mod = build2 (GIMPLE_MODIFY_STMT, void_type_node, finally_tmp,
                        build_int_cst (NULL_TREE, q->index));
          do_goto_redirection (q, finally_label, mod);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
        TREE_VEC_ELT (case_label_vec, case_index)
          = build3 (CASE_LABEL_EXPR, void_type_node,
                    build_int_cst (NULL_TREE, switch_id), NULL,
                    /* We store the cont_stmt in the
                       CASE_LABEL, so that we can recover it
                       in the loop below.  We don't create
                       the new label while walking the
                       goto_queue because pointers don't
                       offer a stable order.  */
                    q->cont_stmt);
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build1 (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
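
/* A worked example with illustrative numbers: for ndests == 3 and a
   finally block estimated at 10 insns, f_estimate = (10 + 1) * 3 = 33
   and sw_estimate = 10 + 2 * 3 = 16.  With -Os the switch wins
   (33 >= 16); at -O2 we copy anyway, since 33 < 100.  */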
static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
           sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();

          x = build1 (GOTO_EXPR, void_type_node, out_label);
          append_to_statement_list (x, &CATCH_BODY (catch));
        }

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */
      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
        {
          tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
          append_to_statement_list (x, tp);
        }
    }
}
/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GIMPLE_MODIFY_STMT:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
        {
        case CATCH_EXPR:
          lower_catch (state, tp);
          break;
        case EH_FILTER_EXPR:
          lower_eh_filter (state, tp);
          break;
        default:
          lower_cleanup (state, tp);
          break;
        }
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
        {
          lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
          t = tsi_stmt (i);
          if (TREE_CODE (t) == STATEMENT_LIST)
            {
              tsi_link_before (&i, t, TSI_SAME_STMT);
              tsi_delink (&i);
            }
          else
            tsi_next (&i);
        }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
  return 0;
}

struct tree_opt_pass pass_lower_eh =
{
  "eh",				/* name */
  NULL,				/* gate */
  lower_eh_constructs,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_EH,			/* tv_id */
  PROP_gimple_lcf,		/* properties_required */
  PROP_gimple_leh,		/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func,		/* todo_flags_finish */
  0				/* letter */
};
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
static bool mark_eh_edge_found_error;

/* Mark the edge that make_eh_edge would create for the given region by
   setting its aux field; report an error if something goes wrong.  */

static void
mark_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;
  edge e;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  e = find_edge (src, dst);
  if (!e)
    {
      error ("EH edge %i->%i is missing", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (!(e->flags & EDGE_EH))
    {
      error ("EH edge %i->%i miss EH flag", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (e->aux)
    {
      /* ??? This might not be a mistake.  */
      error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else
    e->aux = (void *)1;
}
/* Verify that the basic block containing STMT as its last statement
   has precisely the edges that make_eh_edges would create.  */

bool
verify_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;
  basic_block bb = bb_for_stmt (stmt);
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & EDGE_EH)
              {
                error ("BB %i can not throw but has EH edges", bb->index);
                return true;
              }
          return false;
        }
      if (!tree_could_throw_p (stmt))
        {
          error ("BB %i last statement has incorrectly set region", bb->index);
          return true;
        }
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if ((e->flags & EDGE_EH) && !e->aux)
        {
          error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
          mark_eh_edge_found_error = true;
          return true;
        }
      e->aux = NULL;
    }
  return mark_eh_edge_found_error;
}
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */
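
/* A few examples of what the cases below decide: *P traps unless
   TREE_THIS_NOTRAP is set on the reference; A[I] traps when the index
   may be out of bounds; X / Y traps when Y is not a nonzero constant;
   and with -ftrapping-math any floating-point arithmetic may trap.  */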
bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
        honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case TARGET_MEM_REF:
      /* For TARGET_MEM_REFs use the information based on the original
         reference.  */
      expr = TMR_ORIGINAL (expr);
      code = TREE_CODE (expr);
      goto restart;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
        return true;
      if (fp_operation)
        return flag_trapping_math;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
        return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      if (honor_trapv)
        return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      return false;
    }
}

bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (GIMPLE_STMT_OPERAND (t, 0)))
        return true;
      t = GIMPLE_STMT_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
bool
tree_can_throw_internal (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr, is_resx);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return tree_could_throw_p (stmt);
  else
    return can_throw_external_1 (region_nr, is_resx);
}
/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */
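
/* Typical use (a sketch; it assumes the caller has access to
   tree_purge_dead_eh_edges): after folding a statement in place,

	if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
	  tree_purge_dead_eh_edges (bb);

   so that the EH edges out of BB match what NEW_STMT can throw.  */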
bool
maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
{
  int region_nr = lookup_stmt_eh_region (old_stmt);

  if (region_nr >= 0)
    {
      bool new_stmt_could_throw = tree_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
        return false;

      remove_stmt_from_eh_region (old_stmt);
      if (new_stmt_could_throw)
        {
          add_stmt_to_eh_region (new_stmt, region_nr);
          return false;
        }
      else
        return true;
    }

  return false;
}

#ifdef ENABLE_CHECKING
static int
verify_eh_throw_stmt_node (void **slot, void *data ATTRIBUTE_UNUSED)
{
  struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;

  gcc_assert (node->stmt->base.ann == NULL);
  return 1;
}

void
verify_eh_throw_table_statements (void)
{
  if (!get_eh_throw_stmt_table (cfun))
    return;
  htab_traverse (get_eh_throw_stmt_table (cfun),
                 verify_eh_throw_stmt_node,
                 NULL);
}
#endif