/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = a;
  return (size_t)*x >> 4;
}
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
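
/* For illustration only (not part of the pass): a statement that may
   throw gets an entry mapping it to its EH region number, roughly

	add_stmt_to_eh_region (stmt, region_nr);
	...
	gcc_assert (lookup_stmt_eh_region (stmt) == region_nr);

   while a statement that cannot throw never appears in the table and
   lookup_stmt_eh_region returns -1 for it.  */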
struct throw_stmt_node GTY(())
{
  tree stmt;
  int region_nr;
};

static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  struct throw_stmt_node *n;
  void **slot;

  if (!region)
    return;

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = get_eh_region_number (region);

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

void
add_stmt_to_eh_region (tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = num;

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

bool
remove_stmt_from_eh_region (tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!throw_stmt_table)
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (throw_stmt_table, &dummy, NO_INSERT);
  if (slot)
    {
      htab_clear_slot (throw_stmt_table, slot);
      return true;
    }
  else
    return false;
}

int
lookup_stmt_eh_region (tree t)
{
  struct throw_stmt_node *p, n;

  if (!throw_stmt_table)
    return -2;

  n.stmt = t;
  p = htab_find (throw_stmt_table, &n);

  return (p ? p->region_nr : -1);
}
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */
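
/* For example (illustrative only), given approximately

	try { if (cond) goto out; ... } finally { cleanup (); }
	out:;

   the tree built here records every LABEL_DECL and TRY_FINALLY_EXPR
   together with its innermost enclosing TRY_FINALLY_EXPR, so the
   second phase can see that the goto to "out" leaves the try body
   and must be routed through cleanup ().  */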
struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = xmalloc (sizeof (*n));
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */
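
/* As a rough sketch (illustrative only), a node such as

	try { body; } finally { cleanup; }

   whose only exit is falling off the end becomes approximately

	body; cleanup;

   while escaping gotos and returns are redirected through fresh
   labels so that every exit executes cleanup first.  The routines
   below handle the various special cases.  */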
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node
  {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  varray_type dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *) x)->stmt;
  tree b = ((const struct goto_queue_node *) y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
                 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
        {
          tsi_link_before (tsi, t, TSI_SAME_STMT);
          tsi_delink (tsi);
          return;
        }
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;

    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;

    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}

/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}
/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
        tree lab = GOTO_DESTINATION (stmt);

        /* Computed and non-local gotos do not get processed.  Given
           their nature we can neither tell whether we've escaped the
           finally block nor redirect them if we knew.  */
        if (TREE_CODE (lab) != LABEL_DECL)
          return;

        /* No need to record gotos that don't leave the try block.  */
        if (! outside_finally_tree (lab, tf->try_finally_expr))
          return;

        if (! tf->dest_array)
          {
            VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
            VARRAY_PUSH_TREE (tf->dest_array, lab);
            index = 0;
          }
        else
          {
            int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
            for (index = 0; index < n; ++index)
              if (VARRAY_TREE (tf->dest_array, index) == lab)
                break;
            if (index == n)
              VARRAY_PUSH_TREE (tf->dest_array, lab);
          }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
                       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

		int x;
		int foo (void)
		{
		  x = 0;
		  try {
		    return x;
		  } finally {
		    x++;
		  }
		}

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
        {
        case RESULT_DECL:
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt;
          break;

        case MODIFY_EXPR:
          {
            tree result = TREE_OPERAND (ret_expr, 0);
            tree new, old = TREE_OPERAND (ret_expr, 1);

            if (!*return_value_p)
              {
                if (aggregate_value_p (TREE_TYPE (result),
                                       TREE_TYPE (current_function_decl)))
                  /* If this function returns in memory, copy the argument
                     into the return slot now.  Otherwise, we might need to
                     worry about magic return semantics, so we need to use a
                     temporary to hold the value until we're actually ready
                     to return.  */
                  new = result;
                else
                  new = create_tmp_var (TREE_TYPE (old), "rettmp");
                *return_value_p = new;
              }
            else
              new = *return_value_p;

            x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
            append_to_statement_list (x, &q->repl_stmt);

            if (new == result)
              q->cont_stmt = q->stmt;
            else
              {
                x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
                q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
              }
          }
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* We want to transform
	try { body; } catch { stuff; }
   to
	body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
        over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}
/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
    E.g. with an inline destructor with an embedded try block.  In this
    case we must save the runtime EH data around the nested exception.

    This complication means that any time the previous runtime data might
    be used (via fallthru from the finally) we handle the eh case here,
    whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_cst (NULL_TREE,
                                 get_eh_region_number (tf->region)));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */
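
/* For instance (illustrative only), in

	try { body; } finally { for (;;) wait (); }

   the finally block cannot fall through, so no edge leaving the try
   block ever reaches its original destination; every such edge is
   simply redirected into the single copy of the finally block.  */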
static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */
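
/* For instance (illustrative only), in

	try { body; goto done; } finally { cleanup (); }

   where the goto is the only way out of the try block, a single copy
   of the finally block followed by the original branch suffices.  */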
static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_cst (NULL_TREE,
                                 get_eh_region_number (tf->region)));
      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
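
/* For instance (illustrative only), with an escaping goto plus a
   possible fallthru, roughly

	try { ... goto out; ... } finally { cleanup; }

   becomes

	... goto L1; ... cleanup; goto fallthru;
	L1: cleanup; goto out;

   so each destination gets its own copy of the cleanup.  */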
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_cst (NULL_TREE,
                                 get_eh_region_number (tf->region)));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;

      struct
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      if (tf->dest_array)
        return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
      else
        return_index = 0;
      labels = xcalloc (sizeof (*labels), return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label = create_artificial_label ();

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);

          x = build1 (LABEL_EXPR, void_type_node, lab);
          append_to_statement_list (x, &new_stmt);

          x = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, &x);
          append_to_statement_list (x, &new_stmt);

          append_to_statement_list (q->cont_stmt, &new_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
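
/* The result looks roughly like this (illustrative only):

	<try block, with each exit rewritten to
	   finally_tmp = N; goto finally_label;>
	finally_label:
	  <finally block>
	  switch (finally_tmp)
	    {
	    case 0: goto dest0;
	    case 1: goto dest1;
	    ...
	    }

   so only one copy of the finally block is emitted, at the cost of
   the dispatch variable and the switch.  */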
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  if (tf->dest_array)
    nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
  else
    nlabels = 0;
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
                       NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
        {
          x = build1 (GOTO_EXPR, void_type_node, finally_label);
          append_to_statement_list (x, tf->top_p);
        }

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_cst (NULL_TREE, fallthru_index), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_cst (NULL_TREE, eh_index), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build1 (RESX_EXPR, void_type_node,
                  build_int_cst (NULL_TREE,
                                 get_eh_region_number (tf->region)));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_cst (NULL_TREE, return_index));
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_cst (NULL_TREE, q->index));
          do_goto_redirection (q, finally_label, mod);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
        TREE_VEC_ELT (case_label_vec, case_index)
          = build (CASE_LABEL_EXPR, void_type_node,
                   build_int_cst (NULL_TREE, switch_id), NULL,
                   /* We store the cont_stmt in the
                      CASE_LABEL, so that we can recover it
                      in the loop below.  We don't create
                      the new label while walking the
                      goto_queue because pointers don't
                      offer a stable order.  */
                   q->cont_stmt);
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
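
/* A worked example of the heuristic below (numbers illustrative):
   a finally block estimated at 5 insns with 3 destinations gives
   f_estimate = (5 + 1) * 3 = 18 and sw_estimate = 10 + 2 * 3 = 16,
   so optimizing for size we keep the switch (18 < 16 fails), while
   otherwise we duplicate, since f_estimate < 40.  */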
static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (ndests == 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */
static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
           sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  if (this_tf.dest_array)
    ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
  else
    ndests = 0;
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */
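
/* Roughly (illustrative only), a node such as

	try { body; } catch (A) { a; } catch (B) { b; }

   becomes

	body; goto over;
	lab_a: a; goto over;
	lab_b: b; goto over;
	over:

   with lab_a and lab_b registered as the labels of the new catch
   regions.  */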
static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();

          x = build1 (GOTO_EXPR, void_type_node, out_label);
          append_to_statement_list (x, &CATCH_BODY (catch));
        }

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */
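
/* That is (illustrative only), for

	try { body; } cleanup { stuff; }

   "stuff" runs only when "body" throws; a normal exit from "body"
   skips it, unlike a true try-finally.  */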
static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
        {
          tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
          append_to_statement_list (x, tp);
        }
    }
}
1583 lower_eh_constructs_1 (struct leh_state
*state
, tree
*tp
)
1585 tree_stmt_iterator i
;
1588 switch (TREE_CODE (t
))
1591 lower_eh_constructs_1 (state
, &COND_EXPR_THEN (t
));
1592 lower_eh_constructs_1 (state
, &COND_EXPR_ELSE (t
));
1596 /* Look for things that can throw exceptions, and record them. */
1597 if (state
->cur_region
&& tree_could_throw_p (t
))
1599 record_stmt_eh_region (state
->cur_region
, t
);
1600 note_eh_region_may_contain_throw (state
->cur_region
);
1605 /* Look for things that can throw exceptions, and record them. */
1606 if (state
->cur_region
&& tree_could_throw_p (t
))
1610 record_stmt_eh_region (state
->cur_region
, t
);
1611 note_eh_region_may_contain_throw (state
->cur_region
);
1613 /* ??? For the benefit of calls.c, converting all this to rtl,
1614 we need to record the call expression, not just the outer
1615 modify statement. */
1616 op
= get_call_expr_in (t
);
1618 record_stmt_eh_region (state
->cur_region
, op
);
1624 maybe_record_in_goto_queue (state
, t
);
1627 verify_norecord_switch_expr (state
, t
);
1630 case TRY_FINALLY_EXPR
:
1631 lower_try_finally (state
, tp
);
1634 case TRY_CATCH_EXPR
:
1635 i
= tsi_start (TREE_OPERAND (t
, 1));
1636 switch (TREE_CODE (tsi_stmt (i
)))
1639 lower_catch (state
, tp
);
1641 case EH_FILTER_EXPR
:
1642 lower_eh_filter (state
, tp
);
1645 lower_cleanup (state
, tp
);
1650 case STATEMENT_LIST
:
1651 for (i
= tsi_start (t
); !tsi_end_p (i
); )
1653 lower_eh_constructs_1 (state
, tsi_stmt_ptr (i
));
1655 if (TREE_CODE (t
) == STATEMENT_LIST
)
1657 tsi_link_before (&i
, t
, TSI_SAME_STMT
);
1666 /* A type, a decl, or some kind of statement that we're not
1667 interested in. Don't walk them. */
1673 lower_eh_constructs (void)
1675 struct leh_state null_state
;
1676 tree
*tp
= &DECL_SAVED_TREE (current_function_decl
);
1678 finally_tree
= htab_create (31, struct_ptr_hash
, struct_ptr_eq
, free
);
1679 throw_stmt_table
= htab_create_ggc (31, struct_ptr_hash
, struct_ptr_eq
,
1682 collect_finally_tree (*tp
, NULL
);
1684 memset (&null_state
, 0, sizeof (null_state
));
1685 lower_eh_constructs_1 (&null_state
, tp
);
1687 htab_delete (finally_tree
);
1689 collect_eh_region_array ();
1692 struct tree_opt_pass pass_lower_eh
=
1696 lower_eh_constructs
, /* execute */
1699 0, /* static_pass_number */
1700 TV_TREE_EH
, /* tv_id */
1701 PROP_gimple_lcf
, /* properties_required */
1702 PROP_gimple_leh
, /* properties_provided */
1703 PROP_gimple_lcf
, /* properties_destroyed */
1704 0, /* todo_flags_start */
1705 TODO_dump_func
, /* todo_flags_finish */
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */
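
/* Some examples (illustrative only): "*p" may trap unless
   TREE_THIS_NOTRAP is set; "x / y" may trap when Y is not a nonzero
   constant, or for floating point depending on -ftrapping-math; and
   integer "a + b" traps only when -ftrapv is honored.  */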
bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
        honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      /* Let us be conservative here for now.  We might be checking bounds of
         the access similarly to the case below.  */
      if (!TREE_THIS_NOTRAP (expr))
        return true;

      base = TREE_OPERAND (expr, 0);
      return tree_could_trap_p (base);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
        return true;
      if (fp_operation)
        return flag_trapping_math;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
        return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      if (honor_trapv)
        return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      return false;
    }
}
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (TREE_OPERAND (t, 0)))
        return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}

bool
tree_can_throw_internal (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_external_1 (region_nr);
}

bool
maybe_clean_eh_stmt (tree stmt)
{
  if (!tree_could_throw_p (stmt))
    if (remove_stmt_from_eh_region (stmt))
      return true;
  return false;
}

#include "gt-tree-eh.h"