/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "toplev.h"
#include "pointer-set.h"

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
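
/* Remember that the front end requested use of the EH machinery for
   cleanups; lower_try_finally below consults this flag.  */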

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
}

/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}
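
/* Record statement T in function IFUN as belonging to EH region
   number NUM, creating IFUN's throw-statement table if needed.  */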

void
add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  gcc_assert (TREE_CODE (t) != RESX_EXPR);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
                                                    struct_ptr_eq,
                                                    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
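
/* Likewise, but for the current function (cfun).  */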

void
add_stmt_to_eh_region (tree t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}
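
/* Remove statement T in function IFUN from the EH table; return
   true if it was found there.  */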

bool
remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                         NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}
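
/* Likewise, but for the current function (cfun).  */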

bool
remove_stmt_from_eh_region (tree t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}
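
/* Determine the EH region number for statement T in function IFUN.
   Return -2 if IFUN has no EH table, or -1 if T is not in a region.  */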

int
lookup_stmt_eh_region_fn (struct function *ifun, const_tree t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  /* The CONST_CAST is okay because we don't modify n.stmt throughout
     its scope, or the scope of p.  */
  n.stmt = CONST_CAST_TREE (t);
  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
                                            &n);

  return (p ? p->region_nr : -1);
}
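
/* Likewise, but for the current function (cfun).  */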

int
lookup_stmt_eh_region (const_tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;
  return lookup_stmt_eh_region_fn (cfun, t);
}

/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}

/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  unsigned int i;
  void **slot;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt == stmt)
          return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
        {
          slot = pointer_map_insert (tf->goto_queue_map, tf->goto_queue[i].stmt);
          gcc_assert (*slot == NULL);
          *slot = (void *) &tf->goto_queue[i];
        }
    }

  slot = pointer_map_contains (tf->goto_queue_map, stmt);
  if (slot != NULL)
    return (((struct goto_queue_node *) *slot)->repl_stmt);

  return NULL;
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
        {
          tsi_link_before (tsi, t, TSI_SAME_STMT);
          tsi_delink (tsi);
          return;
        }
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      gcc_unreachable ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}

/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}

/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
        tree lab = GOTO_DESTINATION (stmt);

        /* Computed and non-local gotos do not get processed.  Given
           their nature we can neither tell whether we've escaped the
           finally block nor redirect them if we knew.  */
        if (TREE_CODE (lab) != LABEL_DECL)
          return;

        /* No need to record gotos that don't leave the try block.  */
        if (! outside_finally_tree (lab, tf->try_finally_expr))
          return;

        if (! tf->dest_array)
          {
            tf->dest_array = VEC_alloc (tree, heap, 10);
            VEC_quick_push (tree, tf->dest_array, lab);
            index = 0;
          }
        else
          {
            int n = VEC_length (tree, tf->dest_array);
            for (index = 0; index < n; ++index)
              if (VEC_index (tree, tf->dest_array, index) == lab)
                break;
            if (index == n)
              VEC_safe_push (tree, heap, tf->dest_array, lab);
          }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}

#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
                       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

           int x;
           int foo (void)
           {
             x = 0;
             try {
               return x;
             } finally {
               x++;
             }
           }

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
        {
        case RESULT_DECL:
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt;
          break;

        case GIMPLE_MODIFY_STMT:
          {
            tree result = GIMPLE_STMT_OPERAND (ret_expr, 0);
            tree new, old = GIMPLE_STMT_OPERAND (ret_expr, 1);

            if (!*return_value_p)
              {
                if (aggregate_value_p (TREE_TYPE (result),
                                       TREE_TYPE (current_function_decl)))
                  /* If this function returns in memory, copy the argument
                     into the return slot now.  Otherwise, we might need to
                     worry about magic return semantics, so we need to use a
                     temporary to hold the value until we're actually ready
                     to return.  */
                  new = result;
                else
                  new = create_tmp_var (TREE_TYPE (old), "rettmp");
                *return_value_p = new;
              }
            else
              new = *return_value_p;

            x = build_gimple_modify_stmt (new, old);
            append_to_statement_list (x, &q->repl_stmt);

            if (new == result)
              x = result;
            else
              x = build_gimple_modify_stmt (result, new);
            q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
            break;
          }

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* We want to transform
        try { body; } catch { stuff; }
   to
        body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
        over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

        try {
          finally_block;
        } catch {
          if (fintmp == eh_edge)
            protect_cleanup_actions;
        }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
        try {
          try {
            throw ...;
          } cleanup {
            try {
              throw ...;
            } catch (...) {
            }
          } catch (...) {
            yyy;
          }
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  i = tsi_start (finally);
  x = tsi_stmt (i);
  if (protect_cleanup_actions
      && TREE_CODE (x) == TRY_CATCH_EXPR
      && TRY_CATCH_IS_CLEANUP (x))
    {
      tsi_link_before (&i, TREE_OPERAND (x, 0), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build_gimple_modify_stmt (save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build_gimple_modify_stmt (save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build_gimple_modify_stmt (x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build_gimple_modify_stmt (x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build_resx (get_eh_region_number (tf->region));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build_resx (get_eh_region_number (tf->region));

      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  /* Reset the locus of the goto since we're moving
     goto to a different block which might be on a different line.  */
  SET_EXPR_LOCUS (tf->goto_queue[0].cont_stmt, NULL);
  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label = create_artificial_label ();

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);

          x = build1 (LABEL_EXPR, void_type_node, lab);
          append_to_statement_list (x, &new_stmt);

          x = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, &x);
          append_to_statement_list (x, &new_stmt);

          append_to_statement_list (q->cont_stmt, &new_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
                        NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build_gimple_modify_stmt (finally_tmp,
                                    build_int_cst (integer_type_node,
                                                   fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
        {
          x = build1 (GOTO_EXPR, void_type_node, finally_label);
          append_to_statement_list (x, tf->top_p);
        }

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, fallthru_index), NULL,
                          create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build_gimple_modify_stmt (finally_tmp,
                                    build_int_cst (integer_type_node,
                                                   eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, eh_index), NULL,
                          create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
        {
          mod = build_gimple_modify_stmt (finally_tmp,
                                          build_int_cst (integer_type_node,
                                                         return_index));
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          mod = build_gimple_modify_stmt (finally_tmp,
                                          build_int_cst (integer_type_node,
                                                         q->index));
          do_goto_redirection (q, finally_label, mod);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
        TREE_VEC_ELT (case_label_vec, case_index)
          = build3 (CASE_LABEL_EXPR, void_type_node,
                    build_int_cst (NULL_TREE, switch_id), NULL,
                    /* We store the cont_stmt in the
                       CASE_LABEL, so that we can recover it
                       in the loop below.  We don't create
                       the new label while walking the
                       goto_queue because pointers don't
                       offer a stable order.  */
                    q->cont_stmt);
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build1 (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}

/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
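
/* For example, with three destinations and a finally block estimated
   at 8 insns, duplication costs f_estimate = (8 + 1) * 3 = 27 while
   the switch costs sw_estimate = 10 + 2 * 3 = 16; above -O1 we still
   duplicate, since 27 < 100, trading size for simpler control flow.  */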

static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();

          x = build1 (GOTO_EXPR, void_type_node, out_label);
          append_to_statement_list (x, &CATCH_BODY (catch));
        }

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;
  /* For must not throw regions any cleanup regions inside it
     can't reach outer catch regions.  */
  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_state.prev_try = NULL;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}

/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
        {
          tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
          append_to_statement_list (x, tp);
        }
    }
}

/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GIMPLE_MODIFY_STMT:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
        {
        case CATCH_EXPR:
          lower_catch (state, tp);
          break;
        case EH_FILTER_EXPR:
          lower_eh_filter (state, tp);
          break;
        default:
          lower_cleanup (state, tp);
          break;
        }
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
        {
          lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
          t = tsi_stmt (i);
          if (TREE_CODE (t) == STATEMENT_LIST)
            {
              tsi_link_before (&i, t, TSI_SAME_STMT);
              tsi_delink (&i);
            }
          else
            tsi_next (&i);
        }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
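
/* Entry point for the EH lowering pass: lower all EH constructs in
   the current function's saved tree.  */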

static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
  return 0;
}

struct gimple_opt_pass pass_lower_eh =
{
 {
  GIMPLE_PASS,
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};

/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}
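
/* Create the EH edges for STMT: one edge to each handler reachable
   from STMT's region (or from the region of a RESX_EXPR operand).  */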

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}

static bool mark_eh_edge_found_error;

/* Mark the edge that make_eh_edge would create for the given region by
   setting its aux field; output an error if something goes wrong.  */

static void
mark_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;
  edge e;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  e = find_edge (src, dst);
  if (!e)
    {
      error ("EH edge %i->%i is missing", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (!(e->flags & EDGE_EH))
    {
      error ("EH edge %i->%i miss EH flag", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (e->aux)
    {
      /* ??? Might not be a mistake.  */
      error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else
    e->aux = (void *)1;
}

/* Verify that the BB containing STMT as its last statement has precisely
   the edges that make_eh_edges would create.  */

bool
verify_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;
  basic_block bb = bb_for_stmt (stmt);
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & EDGE_EH)
              {
                error ("BB %i can not throw but has EH edges", bb->index);
                return true;
              }
          return false;
        }
      if (!tree_could_throw_p (stmt))
        {
          error ("BB %i last statement has incorrectly set region", bb->index);
          return true;
        }
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if ((e->flags & EDGE_EH) && !e->aux)
        {
          error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
          mark_eh_edge_found_error = true;
          return true;
        }
      e->aux = NULL;
    }

  return mark_eh_edge_found_error;
}

/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      if (COMPARISON_CLASS_P (expr))
        fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
      else
        fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
        honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case TARGET_MEM_REF:
      /* For TARGET_MEM_REFs use the information based on the original
         reference.  */
      expr = TMR_ORIGINAL (expr);
      code = TREE_CODE (expr);
      goto restart;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
        return true;
      if (fp_operation)
        return flag_trapping_math;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
        return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      if (honor_trapv)
        return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      return false;
    }
}
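
/* Return true if statement T could throw an exception, given the
   -fexceptions and -fnon-call-exceptions settings.  */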

bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (GIMPLE_STMT_OPERAND (t, 0)))
        return true;
      t = GIMPLE_STMT_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
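
/* Return true if STMT can throw an exception that is caught within
   this function.  */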

bool
tree_can_throw_internal (const_tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr, is_resx);
}
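
/* Return true if STMT can throw an exception that propagates out of
   this function.  */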

bool
tree_can_throw_external (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return tree_could_throw_p (stmt);
  else
    return can_throw_external_1 (region_nr, is_resx);
}

/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
{
  int region_nr = lookup_stmt_eh_region (old_stmt);

  if (region_nr >= 0)
    {
      bool new_stmt_could_throw = tree_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
        return false;

      remove_stmt_from_eh_region (old_stmt);
      if (new_stmt_could_throw)
        {
          add_stmt_to_eh_region (new_stmt, region_nr);
          return false;
        }
      else
        return true;
    }

  return false;
}

/* Returns TRUE if ONEH and TWOH are exception handlers (op 1 of a
   TRY_CATCH_EXPR or TRY_FINALLY_EXPR) that are similar enough to be
   considered the same.  Currently this only handles handlers consisting of
   a single call, as that's the important case for C++: a destructor call
   for a particular object showing up in multiple handlers.  */

static bool
same_handler_p (tree oneh, tree twoh)
{
  tree_stmt_iterator i;
  tree ones, twos;
  int ai;

  i = tsi_start (oneh);
  if (!tsi_one_before_end_p (i))
    return false;
  ones = tsi_stmt (i);

  i = tsi_start (twoh);
  if (!tsi_one_before_end_p (i))
    return false;
  twos = tsi_stmt (i);

  if (TREE_CODE (ones) != CALL_EXPR
      || TREE_CODE (twos) != CALL_EXPR
      || !operand_equal_p (CALL_EXPR_FN (ones), CALL_EXPR_FN (twos), 0)
      || call_expr_nargs (ones) != call_expr_nargs (twos))
    return false;

  for (ai = 0; ai < call_expr_nargs (ones); ++ai)
    if (!operand_equal_p (CALL_EXPR_ARG (ones, ai),
                          CALL_EXPR_ARG (twos, ai), 0))
      return false;

  return true;
}

/* Optimize
    try { A() } finally { try { ~B() } catch { ~A() } }
    try { ... } finally { ~A() }
   into
    try { A() } catch { ~B() }
    try { ~B() ... } finally { ~A() }

   This occurs frequently in C++, where A is a local variable and B is a
   temporary used in the initializer for A.  */

static void
optimize_double_finally (tree one, tree two)
{
  tree oneh;
  tree_stmt_iterator i;

  i = tsi_start (TREE_OPERAND (one, 1));
  if (!tsi_one_before_end_p (i))
    return;

  oneh = tsi_stmt (i);
  if (TREE_CODE (oneh) != TRY_CATCH_EXPR)
    return;

  if (same_handler_p (TREE_OPERAND (oneh, 1), TREE_OPERAND (two, 1)))
    {
      tree b = TREE_OPERAND (oneh, 0);
      TREE_OPERAND (one, 1) = b;
      TREE_SET_CODE (one, TRY_CATCH_EXPR);

      i = tsi_start (TREE_OPERAND (two, 0));
      tsi_link_before (&i, unsave_expr_now (b), TSI_SAME_STMT);
    }
}

/* Perform EH refactoring optimizations that are simpler to do when code
   flow has been lowered but EH structures haven't.  */

static void
refactor_eh_r (tree t)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      refactor_eh_r (TREE_OPERAND (t, 0));
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        tree one = NULL_TREE, two = NULL_TREE;
        /* Try to refactor double try/finally.  */
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          {
            one = two;
            two = tsi_stmt (i);
            if (one && two
                && TREE_CODE (one) == TRY_FINALLY_EXPR
                && TREE_CODE (two) == TRY_FINALLY_EXPR)
              optimize_double_finally (one, two);
            if (one)
              refactor_eh_r (one);
          }
        if (two)
          {
            t = two;
            goto tailrecurse;
          }
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

static unsigned
refactor_eh (void)
{
  refactor_eh_r (DECL_SAVED_TREE (current_function_decl));
  return 0;
}

struct gimple_opt_pass pass_refactor_eh =
{
 {
  GIMPLE_PASS,
  "ehopt",				/* name */
  NULL,					/* gate */
  refactor_eh,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};