/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "toplev.h"
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
}
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
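
/* For illustration (not part of the original source): the mapping is a
   hash table from statement to EH region number.  Lowering records each
   potentially-throwing statement, and later passes query it, roughly:

       record_stmt_eh_region (region, stmt);      while lowering
       ...
       int rn = lookup_stmt_eh_region (stmt);     later; -1 if none
       if (rn >= 0)
         make_eh_edges (stmt);                    e.g. while building the CFG

   Each struct throw_stmt_node entry in the table holds one
   (stmt, region_nr) pair.  */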
static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}
void
add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  gcc_assert (TREE_CODE (t) != RESX_EXPR);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
						    struct_ptr_eq,
						    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;

  /* ??? For the benefit of calls.c, converting all this to rtl,
     we need to record the call expression, not just the outer
     modify statement.  */
  if (TREE_CODE (t) == MODIFY_EXPR
      && (t = get_call_expr_in (t)))
    add_stmt_to_eh_region_fn (ifun, t, num);
}
void
add_stmt_to_eh_region (tree t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}
bool
remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
			 NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      /* ??? For the benefit of calls.c, converting all this to rtl,
	 we need to record the call expression, not just the outer
	 modify statement.  */
      if (TREE_CODE (t) == MODIFY_EXPR
	  && (t = get_call_expr_in (t)))
	remove_stmt_from_eh_region_fn (ifun, t);
      return true;
    }
  else
    return false;
}
bool
remove_stmt_from_eh_region (tree t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}
int
lookup_stmt_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
					    &n);

  return (p ? p->region_nr : -1);
}
int
lookup_stmt_eh_region (tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;
  return lookup_stmt_eh_region_fn (cfun, t);
}
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */
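
/* For illustration (not part of the original source): given source such as

	try
	  {
	    if (cond)
	      goto out;          leaves the TRY_FINALLY body
	  }
	finally
	  {
	    cleanup ();
	  }
      out:;

   the finally tree records each LABEL_DECL and nested TRY_FINALLY_EXPR
   together with its enclosing TRY_FINALLY_EXPR, so outside_finally_tree
   can tell that the jump to "out" escapes the try body and therefore must
   run the cleanup first.  */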
struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
	return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */
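
/* For illustration (not part of the original source): after this pass a
   TRY_FINALLY_EXPR such as

	try { body; } finally { cleanup; }

   no longer exists as a high-level node.  In the simple fallthru-only case
   it becomes roughly

	body;
	cleanup;

   while with several escaping destinations it becomes either duplicated
   cleanup copies or one cleanup followed by a switch on a temporary (see
   lower_try_finally_copy and lower_try_finally_switch below), plus the
   EH region bookkeeping.  */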
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);
/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *) x)->stmt;
  tree b = ((const struct goto_queue_node *) y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}
/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;

  tmp.stmt = stmt;
  ret = (struct goto_queue_node *)
	bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
		 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
	{
	  tsi_link_before (tsi, t, TSI_SAME_STMT);
	  tsi_delink (tsi);
	  return;
	}
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}
/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}
/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}
/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
	tree lab = GOTO_DESTINATION (stmt);

	/* Computed and non-local gotos do not get processed.  Given
	   their nature we can neither tell whether we've escaped the
	   finally block nor redirect them if we knew.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  return;

	/* No need to record gotos that don't leave the try block.  */
	if (! outside_finally_tree (lab, tf->try_finally_expr))
	  return;

	if (! tf->dest_array)
	  {
	    tf->dest_array = VEC_alloc (tree, heap, 10);
	    VEC_quick_push (tree, tf->dest_array, lab);
	    index = 0;
	  }
	else
	  {
	    int n = VEC_length (tree, tf->dest_array);
	    for (index = 0; index < n; ++index)
	      if (VEC_index (tree, tf->dest_array, index) == lab)
		break;
	    if (index == n)
	      VEC_safe_push (tree, heap, tf->dest_array, lab);
	  }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
		       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
	 return value itself is to be computed before the FINALLY block
	 is executed.  e.g.

		int x;
		int foo (void)
		{
		  x = 0;
		  try {
		    return x;
		  } finally {
		    x++;
		  }
		}

	  should return 0, not 1.  Arrange for this to happen by copying
	  the computed return value into a local temporary.  This also
	  allows us to redirect multiple return statements through the
	  same destination block; whether this is a net win or not really
	  depends, I guess, but it does make generation of the switch in
	  lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
	{
	case RESULT_DECL:
	  if (!*return_value_p)
	    *return_value_p = ret_expr;
	  else
	    gcc_assert (*return_value_p == ret_expr);
	  q->cont_stmt = q->stmt;
	  break;

	case MODIFY_EXPR:
	  {
	    tree result = TREE_OPERAND (ret_expr, 0);
	    tree new, old = TREE_OPERAND (ret_expr, 1);

	    if (!*return_value_p)
	      {
		if (aggregate_value_p (TREE_TYPE (result),
				       TREE_TYPE (current_function_decl)))
		  /* If this function returns in memory, copy the argument
		     into the return slot now.  Otherwise, we might need to
		     worry about magic return semantics, so we need to use a
		     temporary to hold the value until we're actually ready
		     to return.  */
		  new = result;
		else
		  new = create_tmp_var (TREE_TYPE (old), "rettmp");
		*return_value_p = new;
	      }
	    else
	      new = *return_value_p;

	    x = build2 (MODIFY_EXPR, TREE_TYPE (new), new, old);
	    append_to_statement_list (x, &q->repl_stmt);

	    if (new == result)
	      q->cont_stmt = q->stmt;
	    else
	      {
		x = build2 (MODIFY_EXPR, TREE_TYPE (result), result, new);
		q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
	      }
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* We want to transform
	try { body; } catch { stuff; }
   into
	body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */
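
/* For illustration (not part of the original source): with both LAB and
   OVER supplied the rewritten form is roughly

	body;
	goto over;
      lab:
	stuff;
      over:;

   i.e. the handler code stays inline but is only reached through LAB,
   while normal control flow branches around it to OVER.  */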
static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
	over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
	record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}
/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
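
/* For illustration (not part of the original source): for C++,
   lang_protect_cleanup_actions typically yields a call to terminate (),
   matching the rule that an exception escaping a cleanup run during
   unwinding ends the program.  The duplicated finally block then ends up
   wrapped roughly as

	try { finally_block; } catch (...) { terminate (); }

   via an EH_FILTER_EXPR with EH_FILTER_MUST_NOT_THROW set, as built in
   the function below.  */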
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } catch (...) {
	    cleanup ...;
	    goto finally_label;
	  }
	} finally {
	  finally_label:;
	  ...
	}
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build_resx (get_eh_region_number (tf->region));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
	maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */
static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */
static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build_resx (get_eh_region_number (tf->region));

      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
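
/* For illustration (not part of the original source): with two escaping
   destinations, say a fallthru and a "goto out;", the copy strategy emits
   roughly

	body;
	cleanup;                copy for the fallthru path
	goto fallthru_label;
      out_redirect:
	cleanup;                copy for the goto path
	goto out;

   so every path still runs the cleanup exactly once, at the cost of one
   duplicated cleanup body per destination.  */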
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  if (!labels[index].q)
	    labels[index].q = q;
	}

      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label = create_artificial_label ();

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);

	  x = build1 (LABEL_EXPR, void_type_node, lab);
	  append_to_statement_list (x, &new_stmt);

	  x = lower_try_finally_dup_block (finally, state);
	  lower_eh_constructs_1 (state, &x);
	  append_to_statement_list (x, &new_stmt);

	  append_to_statement_list (q->cont_stmt, &new_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
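
/* For illustration (not part of the original source): the switch strategy
   keeps a single copy of the cleanup and dispatches afterwards, roughly

	finally_tmp = <destination index>;   set on each incoming path
	goto finally_label;
      finally_label:
	cleanup;
	switch (finally_tmp)
	  {
	  case 0: goto fallthru_label;
	  case 1: goto out;
	  default: <resume exception propagation>;
	  }

   trading the duplicated cleanup bodies of lower_try_finally_copy for one
   temporary and a switch.  */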
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
			NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
		  build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
	{
	  x = build1 (GOTO_EXPR, void_type_node, finally_label);
	  append_to_statement_list (x, tf->top_p);
	}

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
			  build_int_cst (NULL_TREE, fallthru_index), NULL,
			  create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
		  build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
			  build_int_cst (NULL_TREE, eh_index), NULL,
			  create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
	{
	  mod = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
			build_int_cst (NULL_TREE, return_index));
	  do_return_redirection (q, finally_label, mod, &return_val);
	  switch_id = return_index;
	}
      else
	{
	  mod = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
			build_int_cst (NULL_TREE, q->index));
	  do_goto_redirection (q, finally_label, mod);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
	TREE_VEC_ELT (case_label_vec, case_index)
	  = build3 (CASE_LABEL_EXPR, void_type_node,
		    build_int_cst (NULL_TREE, switch_id), NULL,
		    /* We store the cont_stmt in the
		       CASE_LABEL, so that we can recover it
		       in the loop below.  We don't create
		       the new label while walking the
		       goto_queue because pointers don't
		       offer a stable order.  */
		    q->cont_stmt);
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build1 (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
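
/* For illustration (not part of the original source): with ndests == 3
   and a finally block estimated at 12 insns, f_estimate = (12 + 1) * 3 = 39
   and sw_estimate = 10 + 2 * 3 = 16.  With -Os the test "39 < 16" fails, so
   the switch form is used; at higher optimization levels the more generous
   thresholds below can still allow copying.  */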
static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */
static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
	   sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */
static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
	{
	  if (!out_label)
	    out_label = create_artificial_label ();

	  x = build1 (GOTO_EXPR, void_type_node, out_label);
	  append_to_statement_list (x, &CATCH_BODY (catch));
	}

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */
static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
					 EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;
  /* For must not throw regions any cleanup regions inside it
     can't reach outer catch regions.  */
  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_state.prev_try = NULL;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */
static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
	{
	  tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
	  append_to_statement_list (x, tp);
	}
    }
}
/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
	{
	case CATCH_EXPR:
	  lower_catch (state, tp);
	  break;
	case EH_FILTER_EXPR:
	  lower_eh_filter (state, tp);
	  break;
	default:
	  lower_cleanup (state, tp);
	  break;
	}
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
	{
	  lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
	  t = tsi_stmt (i);
	  if (TREE_CODE (t) == STATEMENT_LIST)
	    {
	      tsi_link_before (&i, t, TSI_SAME_STMT);
	      tsi_delink (&i);
	    }
	  else
	    tsi_next (&i);
	}
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
  return 0;
}
struct tree_opt_pass pass_lower_eh =
{
  "eh",				/* name */
  NULL,				/* gate */
  lower_eh_constructs,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_EH,			/* tv_id */
  PROP_gimple_lcf,		/* properties_required */
  PROP_gimple_leh,		/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func,		/* todo_flags_finish */
  0				/* letter */
};
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}
void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
	return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
static bool mark_eh_edge_found_error;

/* Mark the edge make_eh_edge would create for the given region by setting
   its aux field; output an error if something goes wrong.  */

static void
mark_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;
  edge e;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  e = find_edge (src, dst);
  if (!e)
    {
      error ("EH edge %i->%i is missing", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (!(e->flags & EDGE_EH))
    {
      error ("EH edge %i->%i miss EH flag", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (e->aux)
    {
      /* ??? might not be mistake.  */
      error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else
    e->aux = (void *) 1;
}
/* Verify that the BB containing STMT as its last statement has precisely
   the edges that make_eh_edges would create.  */

bool
verify_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;
  basic_block bb = bb_for_stmt (stmt);
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_EH)
	      {
		error ("BB %i can not throw but has EH edges", bb->index);
		return true;
	      }
	  return false;
	}
      if (!tree_could_throw_p (stmt))
	{
	  error ("BB %i last statement has incorrectly set region", bb->index);
	  return true;
	}
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if ((e->flags & EDGE_EH) && !e->aux)
	{
	  error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
	  mark_eh_edge_found_error = true;
	}
      e->aux = NULL;
    }
  return mark_eh_edge_found_error;
}
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */
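
/* For illustration (not part of the original source): this predicate
   answers true for *p with an arbitrary pointer p, for a[i] when the index
   cannot be shown to be in bounds, for x / y with a possibly-zero y, and
   for floating-point arithmetic when -ftrapping-math is in effect; a plain
   integer addition or a reference to a local scalar answers false.  */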
bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
	honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case TARGET_MEM_REF:
      /* For TARGET_MEM_REFs use the information based on the original
	 reference.  */
      expr = TMR_ORIGINAL (expr);
      code = TREE_CODE (expr);
      goto restart;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
	return true;
      if (fp_operation)
	return flag_trapping_math;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      return false;
    }
}
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
	  && tree_could_trap_p (TREE_OPERAND (t, 0)))
	return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}

bool
tree_can_throw_internal (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr, is_resx);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return tree_could_throw_p (stmt);
  else
    return can_throw_external_1 (region_nr, is_resx);
}
/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */
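
/* For illustration (not part of the original source): a typical caller is a
   statement-folding pass that has rewritten OLD_STMT into NEW_STMT, e.g.
   folded a call that could throw into a plain assignment; it then does
   roughly

	if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
	  tree_purge_dead_eh_edges (bb);

   so that the now-impossible EH edges out of the block are removed.  */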
bool
maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
{
  int region_nr = lookup_stmt_eh_region (old_stmt);

  if (region_nr >= 0)
    {
      bool new_stmt_could_throw = tree_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
	return false;

      remove_stmt_from_eh_region (old_stmt);
      if (new_stmt_could_throw)
	{
	  add_stmt_to_eh_region (new_stmt, region_nr);
	  return false;
	}
      else
	return true;
    }

  return false;
}
2087 verify_eh_throw_stmt_node (void **slot
, void *data ATTRIBUTE_UNUSED
)
2089 struct throw_stmt_node
*node
= (struct throw_stmt_node
*)*slot
;
2091 gcc_assert (node
->stmt
->common
.ann
== NULL
);
2096 verify_eh_throw_table_statements (void)
2098 if (!get_eh_throw_stmt_table (cfun
))
2100 htab_traverse (get_eh_throw_stmt_table (cfun
),
2101 verify_eh_throw_stmt_node
,