/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"

/* HACK */
extern int using_eh_for_cleanups_p;
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */
static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = a;
  return (size_t)*x >> 4;
}
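/* Note: the shift by four above discards the low-order pointer bits,
   which are usually zero due to allocation alignment, so the hash value
   comes from bits that actually vary between structures.  */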
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

struct throw_stmt_node GTY(())
{
  tree stmt;
  int region_nr;
};

static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;
static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  struct throw_stmt_node *n;
  void **slot;

  if (!region)
    return;

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = get_eh_region_number (region);

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}

void
add_stmt_to_eh_region (tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  if (num < 0)
    abort ();

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = num;

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}

bool
remove_stmt_from_eh_region (tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!throw_stmt_table)
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (throw_stmt_table, &dummy, NO_INSERT);
  if (slot)
    {
      htab_clear_slot (throw_stmt_table, slot);
      return true;
    }
  else
    return false;
}
int
lookup_stmt_eh_region (tree t)
{
  struct throw_stmt_node *p, n;

  if (!throw_stmt_table)
    return -2;

  n.stmt = t;
  p = htab_find (throw_stmt_table, &n);

  return (p ? p->region_nr : -1);
}
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */
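/* For instance (an illustrative sketch, not the emitted code):

	try {			(TRY_FINALLY_EXPR A)
	  try {			(TRY_FINALLY_EXPR B)
	    goto done;		(leaves both A and B)
	  } finally {
	    f1 ();
	  }
	} finally {
	  f2 ();
	}
	done:;

   The finally tree maps each label and nested TRY_FINALLY_EXPR to the
   TRY_FINALLY_EXPR that contains it, so walking the parent chain from a
   goto's destination label tells us whether the goto escapes a given
   node.  */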
struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = xmalloc (sizeof (*n));
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}
static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  varray_type dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
                 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
        {
          tsi_link_before (tsi, t, TSI_SAME_STMT);
          tsi_delink (tsi);
          return;
        }
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      abort ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}
/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}
/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
        tree lab = GOTO_DESTINATION (stmt);

        /* Computed and non-local gotos do not get processed.  Given
           their nature we can neither tell whether we've escaped the
           finally block nor redirect them if we knew.  */
        if (TREE_CODE (lab) != LABEL_DECL)
          return;

        /* No need to record gotos that don't leave the try block.  */
        if (! outside_finally_tree (lab, tf->try_finally_expr))
          return;

        if (! tf->dest_array)
          {
            VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
            VARRAY_PUSH_TREE (tf->dest_array, lab);
            index = 0;
          }
        else
          {
            int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
            for (index = 0; index < n; ++index)
              if (VARRAY_TREE (tf->dest_array, index) == lab)
                break;
            if (index == n)
              VARRAY_PUSH_TREE (tf->dest_array, lab);
          }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      abort ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      if (outside_finally_tree (lab, tf->try_finally_expr))
        abort ();
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
                       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

            int x;
            int foo (void)
            {
              x = 0;
              try {
                return x;
              } finally {
                x++;
              }
            }

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      if (TREE_CODE (ret_expr) == RESULT_DECL)
        {
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else if (*return_value_p != ret_expr)
            abort ();
          q->cont_stmt = q->stmt;
        }
      else if (TREE_CODE (ret_expr) == MODIFY_EXPR)
        {
          tree result = TREE_OPERAND (ret_expr, 0);
          tree new, old = TREE_OPERAND (ret_expr, 1);

          if (!*return_value_p)
            {
              if (aggregate_value_p (TREE_TYPE (result),
                                     TREE_TYPE (current_function_decl)))
                /* If this function returns in memory, copy the argument
                   into the return slot now.  Otherwise, we might need to
                   worry about magic return semantics, so we need to use a
                   temporary to hold the value until we're actually ready
                   to return.  */
                new = result;
              else
                new = create_tmp_var (TREE_TYPE (old), "rettmp");
              *return_value_p = new;
            }
          else
            new = *return_value_p;

          x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
          append_to_statement_list (x, &q->repl_stmt);

          if (new == result)
            x = result;
          else
            x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
          q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
        }
      else
        abort ();
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* We want to transform
	try { body; } catch { stuff; }
   to
	body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
        over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = lhd_unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}
/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_2 (get_eh_region_number (tf->region), 0));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */
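/* A minimal sketch of the transformation (labels invented for
   illustration; the real code uses artificial labels):

	try { body; return; } finally { f; for (;;); }

   becomes

	body; goto L;
	L: f; for (;;);

   with the escaping return redirected to L, since nothing that enters
   the finally block ever leaves it.  */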
static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */
static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_2 (get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
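/* For illustration (a sketch with invented labels, not the emitted code):

	try { body; if (c) goto out; } finally { f; }

   with a fallthru edge and one escaping goto becomes, roughly,

	body; if (c) goto t1;
	f; goto fallthru;	(copy for the fallthru edge)
	t1: f; goto out;	(copy for the escaping goto)
	fallthru:;

   i.e. one copy of the finally block per destination, each followed by
   the branch that completes the original edge.  */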
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_2 (get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index;
      tree *labels;

      if (tf->dest_array)
        return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
      else
        return_index = 0;
      labels = xcalloc (sizeof (tree), return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          int index = q->index < 0 ? return_index : q->index;
          tree lab = labels[index];
          bool build_p = false;

          if (!lab)
            {
              labels[index] = lab = create_artificial_label ();
              build_p = true;
            }

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);

          if (build_p)
            {
              x = build1 (LABEL_EXPR, void_type_node, lab);
              append_to_statement_list (x, &new_stmt);

              x = lower_try_finally_dup_block (finally, state);
              lower_eh_constructs_1 (state, &x);
              append_to_statement_list (x, &new_stmt);

              append_to_statement_list (q->cont_stmt, &new_stmt);
              maybe_record_in_goto_queue (state, q->cont_stmt);
            }
        }
      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
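/* A minimal sketch of the result (invented names; the real code uses a
   fresh temporary and artificial labels):

	try { body; if (c) goto out; } finally { f; }

   becomes, roughly,

	body;
	if (c) { finally_tmp = 0; goto finally_label; }
	finally_tmp = 1;
	finally_label:
	f;
	switch (finally_tmp)
	  {
	  case 0: goto out;
	  default: goto fallthru;
	  }
	fallthru:;

   Each incoming edge sets the temporary to a unique index, and a single
   copy of the finally block dispatches on it afterward.  */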
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  if (tf->dest_array)
    nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
  else
    nlabels = 0;
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
                       NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_2 (fallthru_index, 0));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
        {
          x = build1 (GOTO_EXPR, void_type_node, finally_label);
          append_to_statement_list (x, tf->top_p);
        }

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_2 (fallthru_index, 0), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_2 (eh_index, 0));
      append_to_statement_list (x, tf->top_p);

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_2 (eh_index, 0), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build1 (RESX_EXPR, void_type_node,
                  build_int_2 (get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  last_case_index += nlabels;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_2 (return_index, 0));
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_2 (q->index, 0));
          do_goto_redirection (q, finally_label, mod);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
        {
          last_case = build (CASE_LABEL_EXPR, void_type_node,
                             build_int_2 (switch_id, 0), NULL,
                             create_artificial_label ());
          TREE_VEC_ELT (case_label_vec, case_index) = last_case;

          x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
          append_to_statement_list (x, &switch_body);
          append_to_statement_list (q->cont_stmt, &switch_body);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }
    }
  replace_goto_queue (tf);
  last_case_index += nlabels;

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
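/* As a worked example (numbers invented for illustration): a finally
   block estimated at 5 insns with 3 destinations gives
   f_estimate = (5 + 1) * 3 = 18 and sw_estimate = 10 + 2 * 3 = 16.
   With -Os we test 18 < 16, which fails, so the switch machinery is
   used; at -O2 the 18 < 100 threshold applies and the block is
   duplicated instead.  */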
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */
static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
           sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  if (this_tf.dest_array)
    ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
  else
    ndests = 0;
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();

          x = build1 (GOTO_EXPR, void_type_node, out_label);
          append_to_statement_list (x, &CATCH_BODY (catch));
        }

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */
      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
        {
          tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
          append_to_statement_list (x, tp);
        }
    }
}
/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);

          /* ??? For the benefit of calls.c, converting all this to rtl,
             we need to record the call expression, not just the outer
             modify statement.  */
          if (TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR)
            record_stmt_eh_region (state->cur_region, TREE_OPERAND (t, 1));
        }
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
        {
        case CATCH_EXPR:
          lower_catch (state, tp);
          break;
        case EH_FILTER_EXPR:
          lower_eh_filter (state, tp);
          break;
        default:
          lower_cleanup (state, tp);
          break;
        }
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
        {
          lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
          t = tsi_stmt (i);
          if (TREE_CODE (t) == STATEMENT_LIST)
            {
              tsi_link_before (&i, t, TSI_SAME_STMT);
              tsi_delink (&i);
            }
          else
            tsi_next (&i);
        }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
static void
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq,
                                      ggc_free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
}
struct tree_opt_pass pass_lower_eh =
{
  "eh",				/* name */
  NULL,				/* gate */
  lower_eh_constructs,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_EH,			/* tv_id */
  PROP_gimple_lcf,		/* properties_required */
  PROP_gimple_leh,		/* properties_provided */
  PROP_gimple_lcf,		/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func		/* todo_flags_finish */
};
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */
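/* For example, "*p" can trap when P is invalid, "x / y" can trap when
   Y is zero, and a floating-point comparison can trap on a signaling
   NaN, whereas negation and fabs never trap even for floating point.  */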
bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  tree t;

  if (TREE_CODE_CLASS (code) == '<'
      || TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2')
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
    }

  switch (code)
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      t = get_base_address (expr);
      return !t || tree_could_trap_p (t);

    case INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans)
        return true;
      if (fp_operation && flag_trapping_math)
        return true;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap even with floating point.  */
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      return false;
    }
}
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (TREE_OPERAND (t, 0)))
        return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
bool
tree_can_throw_internal (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_external_1 (region_nr);
}

bool
maybe_clean_eh_stmt (tree stmt)
{
  if (!tree_could_throw_p (stmt))
    if (remove_stmt_from_eh_region (stmt))
      return true;
  return false;
}
#include "gt-tree-eh.h"