/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = a;
  return (size_t)*x >> 4;
}
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

struct throw_stmt_node GTY(())
{
  tree stmt;
  int region_nr;
};

static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;
static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  struct throw_stmt_node *n;
  void **slot;

  if (!region)
    return;

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = get_eh_region_number (region);

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}
void
add_stmt_to_eh_region (tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  if (num < 0)
    abort ();

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = num;

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}
bool
remove_stmt_from_eh_region (tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!throw_stmt_table)
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (throw_stmt_table, &dummy, NO_INSERT);
  if (slot)
    {
      htab_clear_slot (throw_stmt_table, slot);
      return true;
    }
  else
    return false;
}
int
lookup_stmt_eh_region (tree t)
{
  struct throw_stmt_node *p, n;

  if (!throw_stmt_table)
    return -2;

  n.stmt = t;
  p = htab_find (throw_stmt_table, &n);

  return (p ? p->region_nr : -1);
}
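
/* For illustration (an added sketch, not part of the original file):
   a statement is recorded against EH region 3 and later queried.

     add_stmt_to_eh_region (stmt, 3);
     ...
     if (lookup_stmt_eh_region (stmt) == 3)
       ...;

   A statement that was never recorded yields -1, and -2 distinguishes
   the case where no table has been created at all.  */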
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = xmalloc (sizeof (*n));
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}
static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
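
/* For illustration (an added sketch, not part of the original file): given

     try { goto out; ... } finally { ... }
     out:;

   collect_finally_tree records "out" with a NULL parent, since it lies
   outside every TRY_FINALLY_EXPR.  outside_finally_tree (out,
   try_finally_expr) then walks up from "out", never reaches the
   TRY_FINALLY_EXPR, and returns true: the goto escapes and must be
   routed through the finally block.  */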
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node
  {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  varray_type dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);
/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}
/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
                 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
        {
          tsi_link_before (tsi, t, TSI_SAME_STMT);
          tsi_delink (tsi);
          return;
        }
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;

    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;

    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}
/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}
/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
        tree lab = GOTO_DESTINATION (stmt);

        /* Computed and non-local gotos do not get processed.  Given
           their nature we can neither tell whether we've escaped the
           finally block nor redirect them if we knew.  */
        if (TREE_CODE (lab) != LABEL_DECL)
          return;

        /* No need to record gotos that don't leave the try block.  */
        if (! outside_finally_tree (lab, tf->try_finally_expr))
          return;

        if (! tf->dest_array)
          {
            VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
            VARRAY_PUSH_TREE (tf->dest_array, lab);
            index = 0;
          }
        else
          {
            int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
            for (index = 0; index < n; ++index)
              if (VARRAY_TREE (tf->dest_array, index) == lab)
                break;
            if (index == n)
              VARRAY_PUSH_TREE (tf->dest_array, lab);
          }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      abort ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->index = index;
  q->stmt = stmt;
}
#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      if (outside_finally_tree (lab, tf->try_finally_expr))
        abort ();
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
                       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

              int x;
              int foo (void)
              {
                x = 0;
                try {
                  return x;
                } finally {
                  x++;
                }
              }

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      if (TREE_CODE (ret_expr) == RESULT_DECL)
        {
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else if (*return_value_p != ret_expr)
            abort ();
          q->cont_stmt = q->stmt;
        }
      else if (TREE_CODE (ret_expr) == MODIFY_EXPR)
        {
          tree result = TREE_OPERAND (ret_expr, 0);
          tree new, old = TREE_OPERAND (ret_expr, 1);

          if (!*return_value_p)
            {
              if (aggregate_value_p (TREE_TYPE (result),
                                     TREE_TYPE (current_function_decl)))
                /* If this function returns in memory, copy the argument
                   into the return slot now.  Otherwise, we might need to
                   worry about magic return semantics, so we need to use a
                   temporary to hold the value until we're actually ready
                   to return.  */
                new = result;
              else
                new = create_tmp_var (TREE_TYPE (old), "rettmp");
              *return_value_p = new;
            }
          else
            new = *return_value_p;

          x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
          append_to_statement_list (x, &q->repl_stmt);

          if (new == result)
            q->cont_stmt = q->stmt;
          else
            {
              x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
              q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
            }
        }
      else
        abort ();
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;

  append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
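
/* For illustration (an added sketch, not part of the original file):
   redirecting a queued "goto D;" with MOD "finally_tmp = 2" leaves

     repl_stmt:  finally_tmp = 2; goto finlab;
     cont_stmt:  goto D;

   repl_stmt is spliced in place of the original goto by
   replace_goto_queue, while cont_stmt is emitted by the caller after
   (a copy of) the finally block to complete the edge.  */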
/* We want to transform
        try { body; } catch { stuff; }
   to
        body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
        over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = lhd_unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}
/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

        try {
          finally_block;
        } catch {
          if (fintmp == eh_edge)
            protect_cleanup_actions;
        }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
        try {
          try {
            throw ...;
          } catch (...) {
            cleanup ...;
          }
        } catch (...) {
          cleanup ...;
        }
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_2 (get_eh_region_number (tf->region), 0));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
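
/* For illustration (an added sketch, not part of the original file): when
   the finally copy may fall through, the instrumentation above brackets
   it roughly as

     save_eptr = <exception pointer>;
     save_filt = <filter value>;
     ... finally block ...
     <exception pointer> = save_eptr;
     <filter value> = save_filt;
     resx N;

   so that a nested exception raised and handled inside the finally block
   cannot clobber the runtime EH data of the exception in flight.  */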
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_2 (get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_2 (get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index;
      tree *labels;

      if (tf->dest_array)
        return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
      else
        return_index = 0;
      labels = xcalloc (sizeof (tree), return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          int index = q->index < 0 ? return_index : q->index;
          tree lab = labels[index];
          bool build_p = false;

          if (!lab)
            {
              labels[index] = lab = create_artificial_label ();
              build_p = true;
            }

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);

          if (build_p)
            {
              x = build1 (LABEL_EXPR, void_type_node, lab);
              append_to_statement_list (x, &new_stmt);

              x = lower_try_finally_dup_block (finally, state);
              lower_eh_constructs_1 (state, &x);
              append_to_statement_list (x, &new_stmt);

              append_to_statement_list (q->cont_stmt, &new_stmt);
              maybe_record_in_goto_queue (state, q->cont_stmt);
            }
        }
      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
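
/* For illustration (an added sketch, not part of the original file): for

     try { if (p) goto D; } finally { fin (); }

   with both a fallthru edge and the escaping goto, the copy strategy
   produces roughly

     if (p) goto L1;
     fin (); goto <fallthru_label>;
     L1: fin (); goto D;

   one duplicated finally block per distinct destination.  */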
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  if (tf->dest_array)
    nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
  else
    nlabels = 0;
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
                       NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_2 (fallthru_index, 0));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
        {
          x = build1 (GOTO_EXPR, void_type_node, finally_label);
          append_to_statement_list (x, tf->top_p);
        }

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_2 (fallthru_index, 0), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_2 (eh_index, 0));
      append_to_statement_list (x, tf->top_p);

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_2 (eh_index, 0), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build1 (RESX_EXPR, void_type_node,
                  build_int_2 (get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  last_case_index += nlabels;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_2 (return_index, 0));
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_2 (q->index, 0));
          do_goto_redirection (q, finally_label, mod);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
        {
          last_case = build (CASE_LABEL_EXPR, void_type_node,
                             build_int_2 (switch_id, 0), NULL,
                             create_artificial_label ());
          TREE_VEC_ELT (case_label_vec, case_index) = last_case;

          x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
          append_to_statement_list (x, &switch_body);
          append_to_statement_list (q->cont_stmt, &switch_body);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }
    }
  replace_goto_queue (tf);
  last_case_index += nlabels;

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
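
/* For illustration (an added sketch, not part of the original file): for
   the same example as above, the switch strategy instead produces roughly

     if (p) goto L1;
     finally_tmp = 1; goto finally_label;
     L1: finally_tmp = 0; goto finally_label;
     finally_label:
     fin ();
     switch (finally_tmp)
       {
       case 0: goto D;
       default: goto <fallthru_label>;
       }

   a single shared copy of the finally block, at the cost of the
   temporary and the dispatch code.  */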
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (ndests == 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
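
/* A worked example (added for illustration, not part of the original
   file): with ndests == 3 and a finally block estimated at 9 insns,
   f_estimate = (9 + 1) * 3 = 30 and sw_estimate = 10 + 2 * 3 = 16.
   Since 30 < 40, the copy strategy still wins; a finally block of 40
   insns would give f_estimate = 123, failing both tests, so the switch
   machinery would be used instead.  */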
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
           sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  if (this_tf.dest_array)
    ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
  else
    ndests = 0;
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();

          x = build1 (GOTO_EXPR, void_type_node, out_label);
          append_to_statement_list (x, &CATCH_BODY (catch));
        }

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
        {
          tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
          append_to_statement_list (x, tp);
        }
    }
}
/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          tree op;

          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);

          /* ??? For the benefit of calls.c, converting all this to rtl,
             we need to record the call expression, not just the outer
             modify statement.  */
          op = get_call_expr_in (t);
          if (op)
            record_stmt_eh_region (state->cur_region, op);
        }
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;

    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
        {
        case CATCH_EXPR:
          lower_catch (state, tp);
          break;
        case EH_FILTER_EXPR:
          lower_eh_filter (state, tp);
          break;
        default:
          lower_cleanup (state, tp);
          break;
        }
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
        {
          lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
          t = tsi_stmt (i);
          if (TREE_CODE (t) == STATEMENT_LIST)
            {
              tsi_link_before (&i, t, TSI_SAME_STMT);
              tsi_delink (&i);
            }
          else
            tsi_next (&i);
        }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
static void
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq,
                                      ggc_free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
}
struct tree_opt_pass pass_lower_eh =
{
  "eh",				/* name */
  NULL,				/* gate */
  lower_eh_constructs,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_EH,			/* tv_id */
  PROP_gimple_lcf,		/* properties_required */
  PROP_gimple_leh,		/* properties_provided */
  PROP_gimple_lcf,		/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func		/* todo_flags_finish */
};
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base, idx;

  if (TREE_CODE_CLASS (code) == '<'
      || TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2')
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
        honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      /* Let us be conservative here for now.  We might be checking bounds of
         the access similarly to the case below.  */
      if (!TREE_THIS_NOTRAP (expr))
        return true;

      base = TREE_OPERAND (expr, 0);
      return tree_could_trap_p (base);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      idx = TREE_OPERAND (expr, 1);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
        return true;
      if (fp_operation && flag_trapping_math)
        return true;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
        return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      if (honor_trapv)
        return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      return false;
    }
}
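
/* Examples (added for illustration, not part of the original file):
   "*p" could trap unless TREE_THIS_NOTRAP is set; "x / y" could trap
   for an integer divisor that is not a nonzero constant; "a + b" could
   trap only under trapping FP math or -ftrapv signed arithmetic; an
   ARRAY_REF could trap if its base may trap or the index cannot be
   shown to be in bounds.  */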
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (TREE_OPERAND (t, 0)))
        return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
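
/* For illustration (added, not part of the original file): under
   -fnon-call-exceptions the assignment "*p = f ()" could throw either
   from the trapping store or from the call; without that flag only the
   CALL_EXPR matters, and then only if it lacks ECF_NOTHROW.  */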
bool
tree_can_throw_internal (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_external_1 (region_nr);
}

bool
maybe_clean_eh_stmt (tree stmt)
{
  if (!tree_could_throw_p (stmt))
    if (remove_stmt_from_eh_region (stmt))
      return true;
  return false;
}

#include "gt-tree-eh.h"