/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "toplev.h"
#include "pointer-set.h"
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
}
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}

void
add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  gcc_assert (TREE_CODE (t) != RESX_EXPR);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
                                                    struct_ptr_eq,
                                                    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

void
add_stmt_to_eh_region (tree t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}

bool
remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                         NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}

bool
remove_stmt_from_eh_region (tree t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}

int
lookup_stmt_eh_region_fn (struct function *ifun, const_tree t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  /* The CONST_CAST is okay because we don't modify n.stmt throughout
     its scope, or the scope of p.  */
  n.stmt = CONST_CAST_TREE (t);
  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
                                            &n);

  return (p ? p->region_nr : -1);
}

int
lookup_stmt_eh_region (const_tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;
  return lookup_stmt_eh_region_fn (cfun, t);
}
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
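/* For example (an illustrative sketch, not from the original source):
   given

	try { ... goto L; ... } finally { ... }

   where the label L is recorded under an outer region (or under no
   region at all), the chain of recorded parents starting at L never
   reaches this TRY_FINALLY_EXPR node, so outside_finally_tree returns
   true and the goto must be routed through the finally block.  */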
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straight-forward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  unsigned int i;
  void **slot;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt == stmt)
          return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
        {
          slot = pointer_map_insert (tf->goto_queue_map, tf->goto_queue[i].stmt);
          gcc_assert (*slot == NULL);
          *slot = (void *) &tf->goto_queue[i];
        }
    }

  slot = pointer_map_contains (tf->goto_queue_map, stmt);
  if (slot != NULL)
    return (((struct goto_queue_node *) *slot)->repl_stmt);

  return NULL;
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
        {
          tsi_link_before (tsi, t, TSI_SAME_STMT);
          tsi_delink (tsi);
          return;
        }
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      gcc_unreachable ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}

/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}
/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
        tree lab = GOTO_DESTINATION (stmt);

        /* Computed and non-local gotos do not get processed.  Given
           their nature we can neither tell whether we've escaped the
           finally block nor redirect them if we knew.  */
        if (TREE_CODE (lab) != LABEL_DECL)
          return;

        /* No need to record gotos that don't leave the try block.  */
        if (! outside_finally_tree (lab, tf->try_finally_expr))
          return;

        if (! tf->dest_array)
          {
            tf->dest_array = VEC_alloc (tree, heap, 10);
            VEC_quick_push (tree, tf->dest_array, lab);
            index = 0;
          }
        else
          {
            int n = VEC_length (tree, tf->dest_array);
            for (index = 0; index < n; ++index)
              if (VEC_index (tree, tf->dest_array, index) == lab)
                break;
            if (index == n)
              VEC_safe_push (tree, heap, tf->dest_array, lab);
          }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}

#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
                       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

		int x;
		int foo (void)
		{
		  x = 0;
		  try {
		    return x;
		  } finally {
		    x++;
		  }
		}

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
        {
        case RESULT_DECL:
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt;
          break;

        case GIMPLE_MODIFY_STMT:
          {
            tree result = GIMPLE_STMT_OPERAND (ret_expr, 0);
            tree new, old = GIMPLE_STMT_OPERAND (ret_expr, 1);

            if (!*return_value_p)
              {
                if (aggregate_value_p (TREE_TYPE (result),
                                       TREE_TYPE (current_function_decl)))
                  /* If this function returns in memory, copy the argument
                     into the return slot now.  Otherwise, we might need to
                     worry about magic return semantics, so we need to use a
                     temporary to hold the value until we're actually ready
                     to return.  */
                  new = result;
                else
                  new = create_tmp_var (TREE_TYPE (old), "rettmp");
                *return_value_p = new;
              }
            else
              new = *return_value_p;

            x = build_gimple_modify_stmt (new, old);
            append_to_statement_list (x, &q->repl_stmt);

            if (new == result)
              x = result;
            else
              x = build_gimple_modify_stmt (result, new);
            q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
          }
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* We want to transform
	try { body; } catch { stuff; }
   to
	body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
        over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  i = tsi_start (finally);
  x = tsi_stmt (i);
  if (protect_cleanup_actions
      && TREE_CODE (x) == TRY_CATCH_EXPR
      && TRY_CATCH_IS_CLEANUP (x))
    {
      tsi_link_before (&i, TREE_OPERAND (x, 0), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build_gimple_modify_stmt (save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build_gimple_modify_stmt (save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build_gimple_modify_stmt (x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build_gimple_modify_stmt (x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build_resx (get_eh_region_number (tf->region));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */
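/* For instance (an illustrative sketch): because the finally block
   never falls through, every escaping goto and return can simply be
   redirected to a single label LAB placed between the two blocks:

	<try-body>
	lab:
	<finally-body>   (does not fall through)

   and no outgoing edges need to be rebuilt afterward.  */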
static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build_resx (get_eh_region_number (tf->region));

      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  /* Reset the locus of the goto since we're moving
     goto to a different block which might be on a different line.  */
  SET_EXPR_LOCUS (tf->goto_queue[0].cont_stmt, NULL);
  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
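/* Sketched informally (an illustration, not literal dump output): with
   one escaping goto destination D plus fallthru and exception edges,
   the result carries one copy of FINALLY per destination:

	<try-body>
	<finally-body>  goto fallthru_lab;
	eh_lab:
	<finally-body>  resx;
	L1:
	<finally-body>  goto D;  */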
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label = create_artificial_label ();

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);

          x = build1 (LABEL_EXPR, void_type_node, lab);
          append_to_statement_list (x, &new_stmt);

          x = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, &x);
          append_to_statement_list (x, &new_stmt);

          append_to_statement_list (q->cont_stmt, &new_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
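/* Sketched informally (an illustration, not literal dump output): each
   incoming edge sets a temporary and jumps to the shared FINALLY copy,
   which then dispatches on the temporary:

	<try-body>
	finally_tmp = fallthru_index;  goto finally_lab;
	eh_lab:
	finally_tmp = eh_index;
	finally_lab:
	<finally-body>
	switch (finally_tmp)
	  {
	  case fallthru_index:  goto fallthru_lab;
	  case eh_index:        resx;
	  case <dest index>:    goto <dest>;
	  }  */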
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
                        NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build_gimple_modify_stmt (finally_tmp,
                                    build_int_cst (integer_type_node,
                                                   fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
        {
          x = build1 (GOTO_EXPR, void_type_node, finally_label);
          append_to_statement_list (x, tf->top_p);
        }

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, fallthru_index), NULL,
                          create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build_gimple_modify_stmt (finally_tmp,
                                    build_int_cst (integer_type_node,
                                                   eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, eh_index), NULL,
                          create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
        {
          mod = build_gimple_modify_stmt (finally_tmp,
                                          build_int_cst (integer_type_node,
                                                         return_index));
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          mod = build_gimple_modify_stmt (finally_tmp,
                                          build_int_cst (integer_type_node,
                                                         q->index));
          do_goto_redirection (q, finally_label, mod);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
        TREE_VEC_ELT (case_label_vec, case_index)
          = build3 (CASE_LABEL_EXPR, void_type_node,
                    build_int_cst (NULL_TREE, switch_id), NULL,
                    /* We store the cont_stmt in the
                       CASE_LABEL, so that we can recover it
                       in the loop below.  We don't create
                       the new label while walking the
                       goto_queue because pointers don't
                       offer a stable order.  */
                    q->cont_stmt);
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build1 (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
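/* As a worked example of the heuristic below (illustrative only): a
   finally block estimated at 8 insns with ndests == 3 gives
   f_estimate = (8 + 1) * 3 = 27 against sw_estimate = 10 + 2 * 3 = 16,
   so optimizing for size we would keep the switch, while at -O2 the
   copy strategy wins via the f_estimate < 100 escape hatch.  */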
static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();

          x = build1 (GOTO_EXPR, void_type_node, out_label);
          append_to_statement_list (x, &CATCH_BODY (catch));
        }

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;
  /* For must not throw regions any cleanup regions inside it
     can't reach outer catch regions.  */
  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_state.prev_try = NULL;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
        {
          tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
          append_to_statement_list (x, tp);
        }
    }
}
/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GIMPLE_MODIFY_STMT:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
        {
        case CATCH_EXPR:
          lower_catch (state, tp);
          break;
        case EH_FILTER_EXPR:
          lower_eh_filter (state, tp);
          break;
        default:
          lower_cleanup (state, tp);
          break;
        }
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
        {
          lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
          t = tsi_stmt (i);
          if (TREE_CODE (t) == STATEMENT_LIST)
            {
              tsi_link_before (&i, t, TSI_SAME_STMT);
              tsi_delink (&i);
            }
          else
            tsi_next (&i);
        }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
  return 0;
}

struct gimple_opt_pass pass_lower_eh =
{
 {
  GIMPLE_PASS,
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
static bool mark_eh_edge_found_error;

/* Mark the edge make_eh_edge would create for the given region by
   setting its aux field; output an error if something goes wrong.  */
static void
mark_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;
  edge e;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  e = find_edge (src, dst);
  if (!e)
    {
      error ("EH edge %i->%i is missing", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (!(e->flags & EDGE_EH))
    {
      error ("EH edge %i->%i miss EH flag", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (e->aux)
    {
      /* ??? might not be mistake.  */
      error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else
    e->aux = (void *)1;
}
/* Verify that BB containing STMT as its last statement has precisely
   the edges that make_eh_edges would create.  */
bool
verify_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;
  basic_block bb = bb_for_stmt (stmt);
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & EDGE_EH)
              {
                error ("BB %i can not throw but has EH edges", bb->index);
                return true;
              }
          return false;
        }
      if (!tree_could_throw_p (stmt))
        {
          error ("BB %i last statement has incorrectly set region", bb->index);
          return true;
        }
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if ((e->flags & EDGE_EH) && !e->aux)
        {
          error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
          mark_eh_edge_found_error = true;
          return true;
        }
      e->aux = NULL;
    }

  return mark_eh_edge_found_error;
}
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */
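/* Informally, the cases handled below include (for example):
   dereferences like *p that are not marked TREE_THIS_NOTRAP, array
   references that may be out of bounds, division or modulus by a
   possibly-zero operand, floating point operations under
   -ftrapping-math or -fsignaling-nans, trapping (-ftrapv) integer
   arithmetic, and calls to weak functions.  */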
bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      if (COMPARISON_CLASS_P (expr))
        fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
      else
        fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
        honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case TARGET_MEM_REF:
      /* For TARGET_MEM_REFs use the information based on the original
         reference.  */
      expr = TMR_ORIGINAL (expr);
      code = TREE_CODE (expr);
      goto restart;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
        return true;
      if (fp_operation)
        return flag_trapping_math;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
        return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      if (honor_trapv)
        return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      return false;
    }
}
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (GIMPLE_STMT_OPERAND (t, 0)))
        return true;
      t = GIMPLE_STMT_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}

bool
tree_can_throw_internal (const_tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr, is_resx);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return tree_could_throw_p (stmt);
  else
    return can_throw_external_1 (region_nr, is_resx);
}
/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
{
  int region_nr = lookup_stmt_eh_region (old_stmt);

  if (region_nr >= 0)
    {
      bool new_stmt_could_throw = tree_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
        return false;

      remove_stmt_from_eh_region (old_stmt);
      if (new_stmt_could_throw)
        {
          add_stmt_to_eh_region (new_stmt, region_nr);
          return false;
        }
      else
        return true;
    }

  return false;
}
/* Returns TRUE if oneh and twoh are exception handlers (op 1 of a
   TRY_CATCH_EXPR or TRY_FINALLY_EXPR) that are similar enough to be
   considered the same.  Currently this only handles handlers consisting of
   a single call, as that's the important case for C++: a destructor call
   for a particular object showing up in multiple handlers.  */

static bool
same_handler_p (tree oneh, tree twoh)
{
  tree_stmt_iterator i;
  tree ones, twos;
  int ai;

  i = tsi_start (oneh);
  if (!tsi_one_before_end_p (i))
    return false;
  ones = tsi_stmt (i);

  i = tsi_start (twoh);
  if (!tsi_one_before_end_p (i))
    return false;
  twos = tsi_stmt (i);

  if (TREE_CODE (ones) != CALL_EXPR
      || TREE_CODE (twos) != CALL_EXPR
      || !operand_equal_p (CALL_EXPR_FN (ones), CALL_EXPR_FN (twos), 0)
      || call_expr_nargs (ones) != call_expr_nargs (twos))
    return false;

  for (ai = 0; ai < call_expr_nargs (ones); ++ai)
    if (!operand_equal_p (CALL_EXPR_ARG (ones, ai),
                          CALL_EXPR_ARG (twos, ai), 0))
      return false;

  return true;
}
/* Optimize
    try { A() } finally { try { ~B() } catch { ~A() } }
    try { ... } finally { ~A() }
   into
    try { A() } catch { ~B() }
    try { ~B() ... } finally { ~A() }

   This occurs frequently in C++, where A is a local variable and B is a
   temporary used in the initializer for A.  */

static void
optimize_double_finally (tree one, tree two)
{
  tree oneh;
  tree_stmt_iterator i;

  i = tsi_start (TREE_OPERAND (one, 1));
  if (!tsi_one_before_end_p (i))
    return;

  oneh = tsi_stmt (i);
  if (TREE_CODE (oneh) != TRY_CATCH_EXPR)
    return;

  if (same_handler_p (TREE_OPERAND (oneh, 1), TREE_OPERAND (two, 1)))
    {
      tree b = TREE_OPERAND (oneh, 0);
      TREE_OPERAND (one, 1) = b;
      TREE_SET_CODE (one, TRY_CATCH_EXPR);

      i = tsi_start (TREE_OPERAND (two, 0));
      tsi_link_before (&i, unsave_expr_now (b), TSI_SAME_STMT);
    }
}
/* Perform EH refactoring optimizations that are simpler to do when code
   flow has been lowered but EH structures haven't.  */

static void
refactor_eh_r (tree t)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      refactor_eh_r (TREE_OPERAND (t, 0));
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        tree one = NULL_TREE, two = NULL_TREE;
        /* Try to refactor double try/finally.  */
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          {
            one = two;
            two = tsi_stmt (i);
            if (one && two
                && TREE_CODE (one) == TRY_FINALLY_EXPR
                && TREE_CODE (two) == TRY_FINALLY_EXPR)
              optimize_double_finally (one, two);
            if (one)
              refactor_eh_r (one);
          }
        if (two)
          {
            t = two;
            goto tailrecurse;
          }
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
static unsigned
refactor_eh (void)
{
  refactor_eh_r (DECL_SAVED_TREE (current_function_decl));
  return 0;
}

struct gimple_opt_pass pass_refactor_eh =
{
 {
  GIMPLE_PASS,
  "ehopt",				/* name */
  NULL,					/* gate */
  refactor_eh,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};