/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "ggc.h"

extern int using_eh_for_cleanups_p;

/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = a;
  return (size_t)*x >> 4;
}

/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

struct throw_stmt_node GTY(())
{
  tree stmt;
  int region_nr;
};

static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  struct throw_stmt_node *n;
  void **slot;

  if (!region)
    return;

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = get_eh_region_number (region);

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}

void
add_stmt_to_eh_region (tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  if (num < 0)
    abort ();

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = num;

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}

int
lookup_stmt_eh_region (tree t)
{
  struct throw_stmt_node *p, n;

  if (!throw_stmt_table)
    return -2;

  n.stmt = t;
  p = htab_find (throw_stmt_table, &n);

  return (p ? p->region_nr : -1);
}

/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  tree child;
  tree parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = xmalloc (sizeof (*n));
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}

/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */
237 /* State of the world while lowering. */
241 /* What's "current" while constructing the eh region tree. These
242 correspond to variables of the same name in cfun->eh, which we
243 don't have easy access to. */
244 struct eh_region
*cur_region
;
245 struct eh_region
*prev_try
;
247 /* Processing of TRY_FINALLY requires a bit more state. This is
248 split out into a separate structure so that we don't have to
249 copy so much when processing other nodes. */
250 struct leh_tf_state
*tf
;
255 /* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
256 is the original TRY_FINALLY_EXPR. We need to retain this so that
257 outside_finally_tree can reliably reference the tree used in the
258 collect_finally_tree data structures. */
259 tree try_finally_expr
;
262 /* The state outside this try_finally node. */
263 struct leh_state
*outer
;
265 /* The exception region created for it. */
266 struct eh_region
*region
;
268 /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
269 that are seen to escape this TRY_FINALLY_EXPR node. */
270 struct goto_queue_node
{
276 size_t goto_queue_size
;
277 size_t goto_queue_active
;
279 /* The set of unique labels seen as entries in the goto queue. */
280 varray_type dest_array
;
282 /* A label to be added at the end of the completed transformed
283 sequence. It will be set if may_fallthru was true *at one time*,
284 though subsequent transformations may have cleared that flag. */
287 /* A label that has been registered with except.c to be the
288 landing pad for this try block. */
291 /* True if it is possible to fall out the bottom of the try block.
292 Cleared if the fallthru is converted to a goto. */
295 /* True if any entry in goto_queue is a RETURN_EXPR. */
298 /* True if the finally block can receive an exception edge.
299 Cleared if the exception case is handled by code duplication. */
303 static void lower_eh_filter (struct leh_state
*, tree
*);
304 static void lower_eh_constructs_1 (struct leh_state
*, tree
*);
306 /* Comparison function for qsort/bsearch. We're interested in
307 searching goto queue elements for source statements. */
310 goto_queue_cmp (const void *x
, const void *y
)
312 tree a
= ((const struct goto_queue_node
*)x
)->stmt
;
313 tree b
= ((const struct goto_queue_node
*)y
)->stmt
;
314 return (a
== b
? 0 : a
< b
? -1 : 1);
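
/* For illustration (not at this point in the original): the queue is
   sorted once, after all escaping gotos and returns have been gathered,
   so that find_goto_replacement below can use bsearch.  This is the call
   that lower_try_finally makes later in this file:

     qsort (tf->goto_queue, tf->goto_queue_active,
            sizeof (struct goto_queue_node), goto_queue_cmp);  */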
317 /* Search for STMT in the goto queue. Return the replacement,
318 or null if the statement isn't in the queue. */
321 find_goto_replacement (struct leh_tf_state
*tf
, tree stmt
)
323 struct goto_queue_node tmp
, *ret
;
325 ret
= bsearch (&tmp
, tf
->goto_queue
, tf
->goto_queue_active
,
326 sizeof (struct goto_queue_node
), goto_queue_cmp
);
327 return (ret
? ret
->repl_stmt
: NULL
);
330 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
331 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
332 then we can just splat it in, otherwise we add the new stmts immediately
333 after the COND_EXPR and redirect. */
336 replace_goto_queue_cond_clause (tree
*tp
, struct leh_tf_state
*tf
,
337 tree_stmt_iterator
*tsi
)
339 tree
new, one
, label
;
341 new = find_goto_replacement (tf
, *tp
);
345 one
= expr_only (new);
346 if (one
&& TREE_CODE (one
) == GOTO_EXPR
)
352 label
= build1 (LABEL_EXPR
, void_type_node
, NULL_TREE
);
353 *tp
= build_and_jump (&LABEL_EXPR_LABEL (label
));
355 tsi_link_after (tsi
, label
, TSI_CONTINUE_LINKING
);
356 tsi_link_after (tsi
, new, TSI_CONTINUE_LINKING
);
359 /* The real work of replace_goto_queue. Returns with TSI updated to
360 point to the next statement. */
362 static void replace_goto_queue_stmt_list (tree
, struct leh_tf_state
*);
365 replace_goto_queue_1 (tree t
, struct leh_tf_state
*tf
, tree_stmt_iterator
*tsi
)
367 switch (TREE_CODE (t
))
371 t
= find_goto_replacement (tf
, t
);
374 tsi_link_before (tsi
, t
, TSI_SAME_STMT
);
381 replace_goto_queue_cond_clause (&COND_EXPR_THEN (t
), tf
, tsi
);
382 replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t
), tf
, tsi
);
385 case TRY_FINALLY_EXPR
:
387 replace_goto_queue_stmt_list (TREE_OPERAND (t
, 0), tf
);
388 replace_goto_queue_stmt_list (TREE_OPERAND (t
, 1), tf
);
391 replace_goto_queue_stmt_list (CATCH_BODY (t
), tf
);
394 replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t
), tf
);
401 /* These won't have gotos in them. */
408 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
411 replace_goto_queue_stmt_list (tree t
, struct leh_tf_state
*tf
)
413 tree_stmt_iterator i
= tsi_start (t
);
414 while (!tsi_end_p (i
))
415 replace_goto_queue_1 (tsi_stmt (i
), tf
, &i
);
418 /* Replace all goto queue members. */
421 replace_goto_queue (struct leh_tf_state
*tf
)
423 replace_goto_queue_stmt_list (*tf
->top_p
, tf
);
426 /* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
node, and if so record that fact in the goto queue associated with that
try_finally node.  */
431 maybe_record_in_goto_queue (struct leh_state
*state
, tree stmt
)
433 struct leh_tf_state
*tf
= state
->tf
;
434 struct goto_queue_node
*q
;
441 switch (TREE_CODE (stmt
))
445 tree lab
= GOTO_DESTINATION (stmt
);
447 /* Computed and non-local gotos do not get processed. Given
448 their nature we can neither tell whether we've escaped the
449 finally block nor redirect them if we knew. */
450 if (TREE_CODE (lab
) != LABEL_DECL
)
453 /* No need to record gotos that don't leave the try block. */
454 if (! outside_finally_tree (lab
, tf
->try_finally_expr
))
457 if (! tf
->dest_array
)
459 VARRAY_TREE_INIT (tf
->dest_array
, 10, "dest_array");
460 VARRAY_PUSH_TREE (tf
->dest_array
, lab
);
465 int n
= VARRAY_ACTIVE_SIZE (tf
->dest_array
);
466 for (index
= 0; index
< n
; ++index
)
467 if (VARRAY_TREE (tf
->dest_array
, index
) == lab
)
470 VARRAY_PUSH_TREE (tf
->dest_array
, lab
);
476 tf
->may_return
= true;
484 active
= tf
->goto_queue_active
;
485 size
= tf
->goto_queue_size
;
488 size
= (size
? size
* 2 : 32);
489 tf
->goto_queue_size
= size
;
491 = xrealloc (tf
->goto_queue
, size
* sizeof (struct goto_queue_node
));
494 q
= &tf
->goto_queue
[active
];
495 tf
->goto_queue_active
= active
+ 1;
497 memset (q
, 0, sizeof (*q
));
502 #ifdef ENABLE_CHECKING
503 /* We do not process SWITCH_EXPRs for now. As long as the original source
504 was in fact structured, and we've not yet done jump threading, then none
505 of the labels will leave outer TRY_FINALLY_EXPRs. Verify this. */
508 verify_norecord_switch_expr (struct leh_state
*state
, tree switch_expr
)
510 struct leh_tf_state
*tf
= state
->tf
;
517 vec
= SWITCH_LABELS (switch_expr
);
518 n
= TREE_VEC_LENGTH (vec
);
520 for (i
= 0; i
< n
; ++i
)
522 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
523 if (outside_finally_tree (lab
, tf
->try_finally_expr
))
528 #define verify_norecord_switch_expr(state, switch_expr)
531 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
532 whatever is needed to finish the return. If MOD is non-null, insert it
533 before the new branch. RETURN_VALUE_P is a cache containing a temporary
534 variable to be used in manipulating the value returned from the function. */
537 do_return_redirection (struct goto_queue_node
*q
, tree finlab
, tree mod
,
538 tree
*return_value_p
)
540 tree ret_expr
= TREE_OPERAND (q
->stmt
, 0);
545 /* The nasty part about redirecting the return value is that the
546 return value itself is to be computed before the FINALLY block
560 should return 0, not 1. Arrange for this to happen by copying the
561 computed return value into a local temporary. This also
562 allows us to redirect multiple return statements through the
563 same destination block; whether this is a net win or not really
564 depends, I guess, but it does make generation of the switch in
565 lower_try_finally_switch easier. */
567 if (TREE_CODE (ret_expr
) == RESULT_DECL
)
569 if (!*return_value_p
)
570 *return_value_p
= ret_expr
;
571 else if (*return_value_p
!= ret_expr
)
573 q
->cont_stmt
= q
->stmt
;
575 else if (TREE_CODE (ret_expr
) == MODIFY_EXPR
)
577 tree result
= TREE_OPERAND (ret_expr
, 0);
578 tree
new, old
= TREE_OPERAND (ret_expr
, 1);
580 if (!*return_value_p
)
582 if (aggregate_value_p (TREE_TYPE (result
),
583 TREE_TYPE (current_function_decl
)))
584 /* If this function returns in memory, copy the argument
585 into the return slot now. Otherwise, we might need to
586 worry about magic return semantics, so we need to use a
587 temporary to hold the value until we're actually ready
591 new = create_tmp_var (TREE_TYPE (old
), "rettmp");
592 *return_value_p
= new;
595 new = *return_value_p
;
597 x
= build (MODIFY_EXPR
, TREE_TYPE (new), new, old
);
598 append_to_statement_list (x
, &q
->repl_stmt
);
603 x
= build (MODIFY_EXPR
, TREE_TYPE (result
), result
, new);
604 q
->cont_stmt
= build1 (RETURN_EXPR
, void_type_node
, x
);
611 /* If we don't return a value, all return statements are the same. */
612 q
->cont_stmt
= q
->stmt
;
616 append_to_statement_list (mod
, &q
->repl_stmt
);
618 x
= build1 (GOTO_EXPR
, void_type_node
, finlab
);
619 append_to_statement_list (x
, &q
->repl_stmt
);
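
/* Illustrative sketch (not from the original): after do_return_redirection,
   a "return x;" that escaped a try/finally has q->repl_stmt holding
   roughly

     rettmp = x;
     goto FINLAB;

   while q->cont_stmt holds the deferred "return <result> = rettmp;" that
   the caller will emit after the finally block has run.  The names rettmp
   and FINLAB are placeholders for the temporary and label built above.  */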
622 /* Similar, but easier, for GOTO_EXPR. */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
637 /* We want to transform
638 try { body; } catch { stuff; }
640 body; goto over; lab: stuff; over:
642 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
643 should be placed before the second operand, or NULL. OVER is
644 an existing label that should be put at the exit, or NULL. */
647 frob_into_branch_around (tree
*tp
, tree lab
, tree over
)
651 op1
= TREE_OPERAND (*tp
, 1);
652 *tp
= TREE_OPERAND (*tp
, 0);
654 if (block_may_fallthru (*tp
))
657 over
= create_artificial_label ();
658 x
= build1 (GOTO_EXPR
, void_type_node
, over
);
659 append_to_statement_list (x
, tp
);
664 x
= build1 (LABEL_EXPR
, void_type_node
, lab
);
665 append_to_statement_list (x
, tp
);
668 append_to_statement_list (op1
, tp
);
672 x
= build1 (LABEL_EXPR
, void_type_node
, over
);
673 append_to_statement_list (x
, tp
);
677 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
678 Make sure to record all new labels found. */
681 lower_try_finally_dup_block (tree t
, struct leh_state
*outer_state
)
685 t
= lhd_unsave_expr_now (t
);
688 region
= outer_state
->tf
->try_finally_expr
;
689 collect_finally_tree (t
, region
);
694 /* A subroutine of lower_try_finally. Create a fallthru label for
695 the given try_finally state. The only tricky bit here is that
696 we have to make sure to record the label in our outer context. */
699 lower_try_finally_fallthru_label (struct leh_tf_state
*tf
)
701 tree label
= tf
->fallthru_label
;
704 label
= create_artificial_label ();
705 tf
->fallthru_label
= label
;
707 record_in_finally_tree (label
, tf
->outer
->tf
->try_finally_expr
);

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
733 honor_protect_cleanup_actions (struct leh_state
*outer_state
,
734 struct leh_state
*this_state
,
735 struct leh_tf_state
*tf
)
737 tree protect_cleanup_actions
, finally
, x
;
738 tree_stmt_iterator i
;
739 bool finally_may_fallthru
;
741 /* First check for nothing to do. */
742 if (lang_protect_cleanup_actions
)
743 protect_cleanup_actions
= lang_protect_cleanup_actions ();
745 protect_cleanup_actions
= NULL
;
747 finally
= TREE_OPERAND (*tf
->top_p
, 1);
749 /* If the EH case of the finally block can fall through, this may be a
750 structure of the form
763 E.g. with an inline destructor with an embedded try block. In this
764 case we must save the runtime EH data around the nested exception.
766 This complication means that any time the previous runtime data might
767 be used (via fallthru from the finally) we handle the eh case here,
768 whether or not protect_cleanup_actions is active. */
770 finally_may_fallthru
= block_may_fallthru (finally
);
771 if (!finally_may_fallthru
&& !protect_cleanup_actions
)
774 /* Duplicate the FINALLY block. Only need to do this for try-finally,
775 and not for cleanups. */
777 finally
= lower_try_finally_dup_block (finally
, outer_state
);
779 /* Resume execution after the exception. Adding this now lets
780 lower_eh_filter not add unnecessary gotos, as it is clear that
781 we never fallthru from this copy of the finally block. */
782 if (finally_may_fallthru
)
784 tree save_eptr
, save_filt
;
786 save_eptr
= create_tmp_var (ptr_type_node
, "save_eptr");
787 save_filt
= create_tmp_var (integer_type_node
, "save_filt");
789 i
= tsi_start (finally
);
790 x
= build (EXC_PTR_EXPR
, ptr_type_node
);
791 x
= build (MODIFY_EXPR
, void_type_node
, save_eptr
, x
);
792 tsi_link_before (&i
, x
, TSI_CONTINUE_LINKING
);
794 x
= build (FILTER_EXPR
, integer_type_node
);
795 x
= build (MODIFY_EXPR
, void_type_node
, save_filt
, x
);
796 tsi_link_before (&i
, x
, TSI_CONTINUE_LINKING
);
798 i
= tsi_last (finally
);
799 x
= build (EXC_PTR_EXPR
, ptr_type_node
);
800 x
= build (MODIFY_EXPR
, void_type_node
, x
, save_eptr
);
801 tsi_link_after (&i
, x
, TSI_CONTINUE_LINKING
);
803 x
= build (FILTER_EXPR
, integer_type_node
);
804 x
= build (MODIFY_EXPR
, void_type_node
, x
, save_filt
);
805 tsi_link_after (&i
, x
, TSI_CONTINUE_LINKING
);
807 x
= build1 (RESX_EXPR
, void_type_node
,
808 build_int_2 (get_eh_region_number (tf
->region
), 0));
809 tsi_link_after (&i
, x
, TSI_CONTINUE_LINKING
);
812 /* Wrap the block with protect_cleanup_actions as the action. */
813 if (protect_cleanup_actions
)
815 x
= build (EH_FILTER_EXPR
, void_type_node
, NULL
, NULL
);
816 append_to_statement_list (protect_cleanup_actions
, &EH_FILTER_FAILURE (x
));
817 EH_FILTER_MUST_NOT_THROW (x
) = 1;
818 finally
= build (TRY_CATCH_EXPR
, void_type_node
, finally
, x
);
819 lower_eh_filter (outer_state
, &finally
);
822 lower_eh_constructs_1 (outer_state
, &finally
);
824 /* Hook this up to the end of the existing try block. If we
825 previously fell through the end, we'll have to branch around.
826 This means adding a new goto, and adding it to the queue. */
828 i
= tsi_last (TREE_OPERAND (*tf
->top_p
, 0));
830 if (tf
->may_fallthru
)
832 x
= lower_try_finally_fallthru_label (tf
);
833 x
= build1 (GOTO_EXPR
, void_type_node
, x
);
834 tsi_link_after (&i
, x
, TSI_CONTINUE_LINKING
);
837 maybe_record_in_goto_queue (this_state
, x
);
839 tf
->may_fallthru
= false;
842 x
= build1 (LABEL_EXPR
, void_type_node
, tf
->eh_label
);
843 tsi_link_after (&i
, x
, TSI_CONTINUE_LINKING
);
844 tsi_link_after (&i
, finally
, TSI_CONTINUE_LINKING
);
846 /* Having now been handled, EH isn't to be considered with
847 the rest of the outgoing edges. */
848 tf
->may_throw
= false;
851 /* A subroutine of lower_try_finally. We have determined that there is
852 no fallthru edge out of the finally block. This means that there is
853 no outgoing edge corresponding to any incoming edge. Restructure the
854 try_finally node for this special case. */
857 lower_try_finally_nofallthru (struct leh_state
*state
, struct leh_tf_state
*tf
)
859 tree x
, finally
, lab
, return_val
;
860 struct goto_queue_node
*q
, *qe
;
865 lab
= create_artificial_label ();
867 finally
= TREE_OPERAND (*tf
->top_p
, 1);
868 *tf
->top_p
= TREE_OPERAND (*tf
->top_p
, 0);
870 x
= build1 (LABEL_EXPR
, void_type_node
, lab
);
871 append_to_statement_list (x
, tf
->top_p
);
875 qe
= q
+ tf
->goto_queue_active
;
878 do_return_redirection (q
, lab
, NULL
, &return_val
);
880 do_goto_redirection (q
, lab
, NULL
);
882 replace_goto_queue (tf
);
884 lower_eh_constructs_1 (state
, &finally
);
885 append_to_statement_list (finally
, tf
->top_p
);
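
/* Illustrative sketch (not from the original): since the finally block
   never falls through, every escaping goto and return can simply be
   pointed at a single label placed in front of it:

       <try body>
     LAB:
       <finally body>   (does not fall through)

   No dispatch switch or block duplication is needed for this case.  */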
888 /* A subroutine of lower_try_finally. We have determined that there is
889 exactly one destination of the finally block. Restructure the
890 try_finally node for this special case. */
893 lower_try_finally_onedest (struct leh_state
*state
, struct leh_tf_state
*tf
)
895 struct goto_queue_node
*q
, *qe
;
896 tree x
, finally
, finally_label
;
898 finally
= TREE_OPERAND (*tf
->top_p
, 1);
899 *tf
->top_p
= TREE_OPERAND (*tf
->top_p
, 0);
901 lower_eh_constructs_1 (state
, &finally
);
905 /* Only reachable via the exception edge. Add the given label to
906 the head of the FINALLY block. Append a RESX at the end. */
908 x
= build1 (LABEL_EXPR
, void_type_node
, tf
->eh_label
);
909 append_to_statement_list (x
, tf
->top_p
);
911 append_to_statement_list (finally
, tf
->top_p
);
913 x
= build1 (RESX_EXPR
, void_type_node
,
914 build_int_2 (get_eh_region_number (tf
->region
), 0));
915 append_to_statement_list (x
, tf
->top_p
);
920 if (tf
->may_fallthru
)
922 /* Only reachable via the fallthru edge. Do nothing but let
923 the two blocks run together; we'll fall out the bottom. */
924 append_to_statement_list (finally
, tf
->top_p
);
928 finally_label
= create_artificial_label ();
929 x
= build1 (LABEL_EXPR
, void_type_node
, finally_label
);
930 append_to_statement_list (x
, tf
->top_p
);
932 append_to_statement_list (finally
, tf
->top_p
);
935 qe
= q
+ tf
->goto_queue_active
;
939 /* Reachable by return expressions only. Redirect them. */
940 tree return_val
= NULL
;
942 do_return_redirection (q
, finally_label
, NULL
, &return_val
);
943 replace_goto_queue (tf
);
947 /* Reachable by goto expressions only. Redirect them. */
949 do_goto_redirection (q
, finally_label
, NULL
);
950 replace_goto_queue (tf
);
952 if (VARRAY_TREE (tf
->dest_array
, 0) == tf
->fallthru_label
)
954 /* Reachable by goto to fallthru label only. Redirect it
955 to the new label (already created, sadly), and do not
956 emit the final branch out, or the fallthru label. */
957 tf
->fallthru_label
= NULL
;
962 append_to_statement_list (tf
->goto_queue
[0].cont_stmt
, tf
->top_p
);
963 maybe_record_in_goto_queue (state
, tf
->goto_queue
[0].cont_stmt
);
966 /* A subroutine of lower_try_finally. There are multiple edges incoming
967 and outgoing from the finally block. Implement this by duplicating the
968 finally block for every destination. */
971 lower_try_finally_copy (struct leh_state
*state
, struct leh_tf_state
*tf
)
973 tree finally
, new_stmt
;
976 finally
= TREE_OPERAND (*tf
->top_p
, 1);
977 *tf
->top_p
= TREE_OPERAND (*tf
->top_p
, 0);
979 new_stmt
= NULL_TREE
;
981 if (tf
->may_fallthru
)
983 x
= lower_try_finally_dup_block (finally
, state
);
984 lower_eh_constructs_1 (state
, &x
);
985 append_to_statement_list (x
, &new_stmt
);
987 x
= lower_try_finally_fallthru_label (tf
);
988 x
= build1 (GOTO_EXPR
, void_type_node
, x
);
989 append_to_statement_list (x
, &new_stmt
);
994 x
= build1 (LABEL_EXPR
, void_type_node
, tf
->eh_label
);
995 append_to_statement_list (x
, &new_stmt
);
997 x
= lower_try_finally_dup_block (finally
, state
);
998 lower_eh_constructs_1 (state
, &x
);
999 append_to_statement_list (x
, &new_stmt
);
1001 x
= build1 (RESX_EXPR
, void_type_node
,
1002 build_int_2 (get_eh_region_number (tf
->region
), 0));
1003 append_to_statement_list (x
, &new_stmt
);
1008 struct goto_queue_node
*q
, *qe
;
1009 tree return_val
= NULL
;
1014 return_index
= VARRAY_ACTIVE_SIZE (tf
->dest_array
);
1017 labels
= xcalloc (sizeof (tree
), return_index
+ 1);
1020 qe
= q
+ tf
->goto_queue_active
;
1023 int index
= q
->index
< 0 ? return_index
: q
->index
;
1024 tree lab
= labels
[index
];
1025 bool build_p
= false;
1029 labels
[index
] = lab
= create_artificial_label ();
1033 if (index
== return_index
)
1034 do_return_redirection (q
, lab
, NULL
, &return_val
);
1036 do_goto_redirection (q
, lab
, NULL
);
1040 x
= build1 (LABEL_EXPR
, void_type_node
, lab
);
1041 append_to_statement_list (x
, &new_stmt
);
1043 x
= lower_try_finally_dup_block (finally
, state
);
1044 lower_eh_constructs_1 (state
, &x
);
1045 append_to_statement_list (x
, &new_stmt
);
1047 append_to_statement_list (q
->cont_stmt
, &new_stmt
);
1048 maybe_record_in_goto_queue (state
, q
->cont_stmt
);
1051 replace_goto_queue (tf
);
1055 /* Need to link new stmts after running replace_goto_queue due
1056 to not wanting to process the same goto stmts twice. */
1057 append_to_statement_list (new_stmt
, tf
->top_p
);
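
/* Illustrative sketch (not from the original): with the copy strategy,
   each distinct destination receives its own copy of the finally block,
   appended after the try body:

       <try body>
       <finally copy>  goto <fallthru label>;   (fallthru edge, if any)
     <eh label>:
       <finally copy>  resx;                    (exception edge, if any)
     <label for D>:
       <finally copy>  <original goto/return to D>;

   so no finally_tmp variable or dispatch switch is required.  */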
1060 /* A subroutine of lower_try_finally. There are multiple edges incoming
1061 and outgoing from the finally block. Implement this by instrumenting
1062 each incoming edge and creating a switch statement at the end of the
1063 finally block that branches to the appropriate destination. */
1066 lower_try_finally_switch (struct leh_state
*state
, struct leh_tf_state
*tf
)
1068 struct goto_queue_node
*q
, *qe
;
1069 tree return_val
= NULL
;
1070 tree finally
, finally_tmp
, finally_label
;
1071 int return_index
, eh_index
, fallthru_index
;
1072 int nlabels
, ndests
, j
, last_case_index
;
1073 tree case_label_vec
, switch_stmt
, last_case
, switch_body
;
1076 /* Mash the TRY block to the head of the chain. */
1077 finally
= TREE_OPERAND (*tf
->top_p
, 1);
1078 *tf
->top_p
= TREE_OPERAND (*tf
->top_p
, 0);
1080 /* Lower the finally block itself. */
1081 lower_eh_constructs_1 (state
, &finally
);
1083 /* Prepare for switch statement generation. */
1085 nlabels
= VARRAY_ACTIVE_SIZE (tf
->dest_array
);
1088 return_index
= nlabels
;
1089 eh_index
= return_index
+ tf
->may_return
;
1090 fallthru_index
= eh_index
+ tf
->may_throw
;
1091 ndests
= fallthru_index
+ tf
->may_fallthru
;
1093 finally_tmp
= create_tmp_var (integer_type_node
, "finally_tmp");
1094 finally_label
= create_artificial_label ();
1096 case_label_vec
= make_tree_vec (ndests
);
1097 switch_stmt
= build (SWITCH_EXPR
, integer_type_node
, finally_tmp
,
1098 NULL_TREE
, case_label_vec
);
1101 last_case_index
= 0;
1103 /* Begin inserting code for getting to the finally block. Things
1104 are done in this order to correspond to the sequence the code is
1107 if (tf
->may_fallthru
)
1109 x
= build (MODIFY_EXPR
, void_type_node
, finally_tmp
,
1110 build_int_2 (fallthru_index
, 0));
1111 append_to_statement_list (x
, tf
->top_p
);
1115 x
= build1 (GOTO_EXPR
, void_type_node
, finally_label
);
1116 append_to_statement_list (x
, tf
->top_p
);
1120 last_case
= build (CASE_LABEL_EXPR
, void_type_node
,
1121 build_int_2 (fallthru_index
, 0), NULL
,
1122 create_artificial_label ());
1123 TREE_VEC_ELT (case_label_vec
, last_case_index
) = last_case
;
1126 x
= build (LABEL_EXPR
, void_type_node
, CASE_LABEL (last_case
));
1127 append_to_statement_list (x
, &switch_body
);
1129 x
= lower_try_finally_fallthru_label (tf
);
1130 x
= build1 (GOTO_EXPR
, void_type_node
, x
);
1131 append_to_statement_list (x
, &switch_body
);
1136 x
= build1 (LABEL_EXPR
, void_type_node
, tf
->eh_label
);
1137 append_to_statement_list (x
, tf
->top_p
);
1139 x
= build (MODIFY_EXPR
, void_type_node
, finally_tmp
,
1140 build_int_2 (eh_index
, 0));
1141 append_to_statement_list (x
, tf
->top_p
);
1143 last_case
= build (CASE_LABEL_EXPR
, void_type_node
,
1144 build_int_2 (eh_index
, 0), NULL
,
1145 create_artificial_label ());
1146 TREE_VEC_ELT (case_label_vec
, last_case_index
) = last_case
;
1149 x
= build (LABEL_EXPR
, void_type_node
, CASE_LABEL (last_case
));
1150 append_to_statement_list (x
, &switch_body
);
1151 x
= build1 (RESX_EXPR
, void_type_node
,
1152 build_int_2 (get_eh_region_number (tf
->region
), 0));
1153 append_to_statement_list (x
, &switch_body
);
1156 x
= build1 (LABEL_EXPR
, void_type_node
, finally_label
);
1157 append_to_statement_list (x
, tf
->top_p
);
1159 append_to_statement_list (finally
, tf
->top_p
);
1161 /* Redirect each incoming goto edge. */
1163 qe
= q
+ tf
->goto_queue_active
;
1164 j
= last_case_index
+ tf
->may_return
;
1165 last_case_index
+= nlabels
;
1169 int switch_id
, case_index
;
1173 mod
= build (MODIFY_EXPR
, void_type_node
, finally_tmp
,
1174 build_int_2 (return_index
, 0));
1175 do_return_redirection (q
, finally_label
, mod
, &return_val
);
1176 switch_id
= return_index
;
1180 mod
= build (MODIFY_EXPR
, void_type_node
, finally_tmp
,
1181 build_int_2 (q
->index
, 0));
1182 do_goto_redirection (q
, finally_label
, mod
);
1183 switch_id
= q
->index
;
1186 case_index
= j
+ q
->index
;
1187 if (!TREE_VEC_ELT (case_label_vec
, case_index
))
1189 last_case
= build (CASE_LABEL_EXPR
, void_type_node
,
1190 build_int_2 (switch_id
, 0), NULL
,
1191 create_artificial_label ());
1192 TREE_VEC_ELT (case_label_vec
, case_index
) = last_case
;
1194 x
= build (LABEL_EXPR
, void_type_node
, CASE_LABEL (last_case
));
1195 append_to_statement_list (x
, &switch_body
);
1196 append_to_statement_list (q
->cont_stmt
, &switch_body
);
1197 maybe_record_in_goto_queue (state
, q
->cont_stmt
);
1200 replace_goto_queue (tf
);
1201 last_case_index
+= nlabels
;
1203 /* Make sure that we have a default label, as one is required. */
1204 CASE_LOW (last_case
) = NULL
;
1206 /* Need to link switch_stmt after running replace_goto_queue due
1207 to not wanting to process the same goto stmts twice. */
1208 append_to_statement_list (switch_stmt
, tf
->top_p
);
1209 append_to_statement_list (switch_body
, tf
->top_p
);
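
/* Illustrative sketch (not from the original; indices are examples only):
   with the switch strategy there is a single copy of the finally block,
   and each incoming edge first records where it came from:

       <try body>
       finally_tmp = <fallthru index>;  goto finally_label;
     <eh label>:
       finally_tmp = <eh index>;
     finally_label:
       <finally body>
       switch (finally_tmp)
         {
         case <fallthru index>:  goto <fallthru label>;
         case <eh index>:        resx;
         case <return index>:    <deferred return>;
         }  */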
1212 /* Decide whether or not we are going to duplicate the finally block.
1213 There are several considerations.
1215 First, if this is Java, then the finally block contains code
1216 written by the user. It has line numbers associated with it,
1217 so duplicating the block means it's difficult to set a breakpoint.
1218 Since controlling code generation via -g is verboten, we simply
1219 never duplicate code without optimization.
1221 Second, we'd like to prevent egregious code growth. One way to
1222 do this is to estimate the size of the finally block, multiply
1223 that by the number of copies we'd need to make, and compare against
1224 the estimate of the size of the switch machinery we'd have to add. */
1227 decide_copy_try_finally (int ndests
, tree finally
)
1229 int f_estimate
, sw_estimate
;
1234 /* Finally estimate N times, plus N gotos. */
1235 f_estimate
= estimate_num_insns (finally
);
1236 f_estimate
= (f_estimate
+ 1) * ndests
;
1238 /* Switch statement (cost 10), N variable assignments, N gotos. */
1239 sw_estimate
= 10 + 2 * ndests
;
1241 /* Optimize for size clearly wants our best guess. */
1243 return f_estimate
< sw_estimate
;
1245 /* ??? These numbers are completely made up so far. */
1247 return f_estimate
< 100 || f_estimate
* 2 < sw_estimate
;
1249 return f_estimate
< 40 || f_estimate
* 3 < sw_estimate
* 2;
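
/* Worked example (not from the original; numbers for illustration only):
   with ndests == 3 and a finally block that estimate_num_insns scores at
   15, we get f_estimate = (15 + 1) * 3 = 48 and sw_estimate = 10 + 2 * 3
   = 16.  When optimizing for size, 48 < 16 is false, so the switch form
   is used; at the other optimization levels the "f_estimate < 100" and
   "f_estimate < 40" escape hatches still let small finally blocks be
   copied even when the switch looks cheaper.  */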
1252 /* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR node
1253 to a sequence of labels and blocks, plus the exception region trees
1254 that record all the magic. This is complicated by the need to
1255 arrange for the FINALLY block to be executed on all exits. */
1258 lower_try_finally (struct leh_state
*state
, tree
*tp
)
1260 struct leh_tf_state this_tf
;
1261 struct leh_state this_state
;
1264 /* Process the try block. */
1266 memset (&this_tf
, 0, sizeof (this_tf
));
1267 this_tf
.try_finally_expr
= *tp
;
1269 this_tf
.outer
= state
;
1270 if (using_eh_for_cleanups_p
)
1272 = gen_eh_region_cleanup (state
->cur_region
, state
->prev_try
);
1274 this_tf
.region
= NULL
;
1276 this_state
.cur_region
= this_tf
.region
;
1277 this_state
.prev_try
= state
->prev_try
;
1278 this_state
.tf
= &this_tf
;
1280 lower_eh_constructs_1 (&this_state
, &TREE_OPERAND (*tp
, 0));
1282 /* Determine if the try block is escaped through the bottom. */
1283 this_tf
.may_fallthru
= block_may_fallthru (TREE_OPERAND (*tp
, 0));
1285 /* Determine if any exceptions are possible within the try block. */
1286 if (using_eh_for_cleanups_p
)
1287 this_tf
.may_throw
= get_eh_region_may_contain_throw (this_tf
.region
);
1288 if (this_tf
.may_throw
)
1290 this_tf
.eh_label
= create_artificial_label ();
1291 set_eh_region_tree_label (this_tf
.region
, this_tf
.eh_label
);
1292 honor_protect_cleanup_actions (state
, &this_state
, &this_tf
);
1295 /* Sort the goto queue for efficient searching later. */
1296 if (this_tf
.goto_queue_active
> 1)
1297 qsort (this_tf
.goto_queue
, this_tf
.goto_queue_active
,
1298 sizeof (struct goto_queue_node
), goto_queue_cmp
);
1300 /* Determine how many edges (still) reach the finally block. Or rather,
1301 how many destinations are reached by the finally block. Use this to
1302 determine how we process the finally block itself. */
1304 if (this_tf
.dest_array
)
1305 ndests
= VARRAY_ACTIVE_SIZE (this_tf
.dest_array
);
1308 ndests
+= this_tf
.may_fallthru
;
1309 ndests
+= this_tf
.may_return
;
1310 ndests
+= this_tf
.may_throw
;
1312 /* If the FINALLY block is not reachable, dike it out. */
1314 *tp
= TREE_OPERAND (*tp
, 0);
1316 /* If the finally block doesn't fall through, then any destination
1317 we might try to impose there isn't reached either. There may be
1318 some minor amount of cleanup and redirection still needed. */
1319 else if (!block_may_fallthru (TREE_OPERAND (*tp
, 1)))
1320 lower_try_finally_nofallthru (state
, &this_tf
);
1322 /* We can easily special-case redirection to a single destination. */
1323 else if (ndests
== 1)
1324 lower_try_finally_onedest (state
, &this_tf
);
1326 else if (decide_copy_try_finally (ndests
, TREE_OPERAND (*tp
, 1)))
1327 lower_try_finally_copy (state
, &this_tf
);
1329 lower_try_finally_switch (state
, &this_tf
);
/* If someone requested we add a label at the end of the transformed
   sequence, do so.  */
1333 if (this_tf
.fallthru_label
)
1335 tree x
= build1 (LABEL_EXPR
, void_type_node
, this_tf
.fallthru_label
);
1336 append_to_statement_list (x
, tp
);
1339 if (this_tf
.goto_queue
)
1340 free (this_tf
.goto_queue
);
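
/* Summary sketch (not from the original): lower_try_finally chooses among
   the helpers above based on how many destinations the finally block must
   reach:

     no remaining destinations          -> drop the finally block entirely
     finally cannot fall through        -> lower_try_finally_nofallthru
     exactly one destination            -> lower_try_finally_onedest
     decide_copy_try_finally says copy  -> lower_try_finally_copy
     otherwise                          -> lower_try_finally_switch  */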
1343 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1344 list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
1345 exception region trees that record all the magic. */
1348 lower_catch (struct leh_state
*state
, tree
*tp
)
1350 struct eh_region
*try_region
;
1351 struct leh_state this_state
;
1352 tree_stmt_iterator i
;
1355 try_region
= gen_eh_region_try (state
->cur_region
);
1356 this_state
.cur_region
= try_region
;
1357 this_state
.prev_try
= try_region
;
1358 this_state
.tf
= state
->tf
;
1360 lower_eh_constructs_1 (&this_state
, &TREE_OPERAND (*tp
, 0));
1362 if (!get_eh_region_may_contain_throw (try_region
))
1364 *tp
= TREE_OPERAND (*tp
, 0);
1369 for (i
= tsi_start (TREE_OPERAND (*tp
, 1)); !tsi_end_p (i
); )
1371 struct eh_region
*catch_region
;
1372 tree
catch, x
, eh_label
;
1374 catch = tsi_stmt (i
);
1375 catch_region
= gen_eh_region_catch (try_region
, CATCH_TYPES (catch));
1377 this_state
.cur_region
= catch_region
;
1378 this_state
.prev_try
= state
->prev_try
;
1379 lower_eh_constructs_1 (&this_state
, &CATCH_BODY (catch));
1381 eh_label
= create_artificial_label ();
1382 set_eh_region_tree_label (catch_region
, eh_label
);
1384 x
= build1 (LABEL_EXPR
, void_type_node
, eh_label
);
1385 tsi_link_before (&i
, x
, TSI_SAME_STMT
);
1387 if (block_may_fallthru (CATCH_BODY (catch)))
1390 out_label
= create_artificial_label ();
1392 x
= build1 (GOTO_EXPR
, void_type_node
, out_label
);
1393 append_to_statement_list (x
, &CATCH_BODY (catch));
1396 tsi_link_before (&i
, CATCH_BODY (catch), TSI_SAME_STMT
);
1400 frob_into_branch_around (tp
, NULL
, out_label
);
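
/* Illustrative sketch (not from the original): a TRY_CATCH_EXPR whose
   handlers are CATCH_EXPRs lowers roughly to

       <try body>
       goto <out label>;
     <eh label 1>:
       <catch body 1>   goto <out label>;
     <eh label 2>:
       <catch body 2>   goto <out label>;
     <out label>:

   with one catch region per handler, each labelled with its eh label via
   set_eh_region_tree_label.  */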
1403 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with an
1404 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1405 region trees that record all the magic. */
1408 lower_eh_filter (struct leh_state
*state
, tree
*tp
)
1410 struct leh_state this_state
;
1411 struct eh_region
*this_region
;
1412 tree inner
= expr_first (TREE_OPERAND (*tp
, 1));
1415 if (EH_FILTER_MUST_NOT_THROW (inner
))
1416 this_region
= gen_eh_region_must_not_throw (state
->cur_region
);
1418 this_region
= gen_eh_region_allowed (state
->cur_region
,
1419 EH_FILTER_TYPES (inner
));
1420 this_state
= *state
;
1421 this_state
.cur_region
= this_region
;
1423 lower_eh_constructs_1 (&this_state
, &TREE_OPERAND (*tp
, 0));
1425 if (!get_eh_region_may_contain_throw (this_region
))
1427 *tp
= TREE_OPERAND (*tp
, 0);
1431 lower_eh_constructs_1 (state
, &EH_FILTER_FAILURE (inner
));
1432 TREE_OPERAND (*tp
, 1) = EH_FILTER_FAILURE (inner
);
1434 eh_label
= create_artificial_label ();
1435 set_eh_region_tree_label (this_region
, eh_label
);
1437 frob_into_branch_around (tp
, eh_label
, NULL
);
1440 /* Implement a cleanup expression. This is similar to try-finally,
1441 except that we only execute the cleanup block for exception edges. */
1444 lower_cleanup (struct leh_state
*state
, tree
*tp
)
1446 struct leh_state this_state
;
1447 struct eh_region
*this_region
;
1448 struct leh_tf_state fake_tf
;
1450 /* If not using eh, then exception-only cleanups are no-ops. */
1451 if (!flag_exceptions
)
1453 *tp
= TREE_OPERAND (*tp
, 0);
1454 lower_eh_constructs_1 (state
, tp
);
1458 this_region
= gen_eh_region_cleanup (state
->cur_region
, state
->prev_try
);
1459 this_state
= *state
;
1460 this_state
.cur_region
= this_region
;
1462 lower_eh_constructs_1 (&this_state
, &TREE_OPERAND (*tp
, 0));
1464 if (!get_eh_region_may_contain_throw (this_region
))
1466 *tp
= TREE_OPERAND (*tp
, 0);
1470 /* Build enough of a try-finally state so that we can reuse
1471 honor_protect_cleanup_actions. */
1472 memset (&fake_tf
, 0, sizeof (fake_tf
));
1474 fake_tf
.outer
= state
;
1475 fake_tf
.region
= this_region
;
1476 fake_tf
.may_fallthru
= block_may_fallthru (TREE_OPERAND (*tp
, 0));
1477 fake_tf
.may_throw
= true;
1479 fake_tf
.eh_label
= create_artificial_label ();
1480 set_eh_region_tree_label (this_region
, fake_tf
.eh_label
);
1482 honor_protect_cleanup_actions (state
, NULL
, &fake_tf
);
1484 if (fake_tf
.may_throw
)
1486 /* In this case honor_protect_cleanup_actions had nothing to do,
1487 and we should process this normally. */
1488 lower_eh_constructs_1 (state
, &TREE_OPERAND (*tp
, 1));
1489 frob_into_branch_around (tp
, fake_tf
.eh_label
, fake_tf
.fallthru_label
);
1493 /* In this case honor_protect_cleanup_actions did nearly all of
1494 the work. All we have left is to append the fallthru_label. */
1496 *tp
= TREE_OPERAND (*tp
, 0);
1497 if (fake_tf
.fallthru_label
)
1499 tree x
= build1 (LABEL_EXPR
, void_type_node
, fake_tf
.fallthru_label
);
1500 append_to_statement_list (x
, tp
);
1505 /* Main loop for lowering eh constructs. */
1508 lower_eh_constructs_1 (struct leh_state
*state
, tree
*tp
)
1510 tree_stmt_iterator i
;
1513 switch (TREE_CODE (t
))
1516 lower_eh_constructs_1 (state
, &COND_EXPR_THEN (t
));
1517 lower_eh_constructs_1 (state
, &COND_EXPR_ELSE (t
));
1521 /* Look for things that can throw exceptions, and record them. */
1522 if (state
->cur_region
&& tree_could_throw_p (t
))
1524 record_stmt_eh_region (state
->cur_region
, t
);
1525 note_eh_region_may_contain_throw (state
->cur_region
);
1530 /* Look for things that can throw exceptions, and record them. */
1531 if (state
->cur_region
&& tree_could_throw_p (t
))
1533 record_stmt_eh_region (state
->cur_region
, t
);
1534 note_eh_region_may_contain_throw (state
->cur_region
);
1536 /* ??? For the benefit of calls.c, converting all this to rtl,
1537 we need to record the call expression, not just the outer
1538 modify statement. */
1539 if (TREE_CODE (TREE_OPERAND (t
, 1)) == CALL_EXPR
)
1540 record_stmt_eh_region (state
->cur_region
, TREE_OPERAND (t
, 1));
1546 maybe_record_in_goto_queue (state
, t
);
1549 verify_norecord_switch_expr (state
, t
);
1552 case TRY_FINALLY_EXPR
:
1553 lower_try_finally (state
, tp
);
1556 case TRY_CATCH_EXPR
:
1557 i
= tsi_start (TREE_OPERAND (t
, 1));
1558 switch (TREE_CODE (tsi_stmt (i
)))
1561 lower_catch (state
, tp
);
1563 case EH_FILTER_EXPR
:
1564 lower_eh_filter (state
, tp
);
1567 lower_cleanup (state
, tp
);
1572 case STATEMENT_LIST
:
1573 for (i
= tsi_start (t
); !tsi_end_p (i
); )
1575 lower_eh_constructs_1 (state
, tsi_stmt_ptr (i
));
1577 if (TREE_CODE (t
) == STATEMENT_LIST
)
1579 tsi_link_before (&i
, t
, TSI_SAME_STMT
);
1588 /* A type, a decl, or some kind of statement that we're not
1589 interested in. Don't walk them. */
static void
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
}

struct tree_opt_pass pass_lower_eh =
{
  "eh",                                 /* name */
  NULL,                                 /* gate */
  lower_eh_constructs,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  PROP_gimple_leh,                      /* properties_provided */
  PROP_gimple_lcf,                      /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
};

/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
1671 /* Return true if the expr can trap, as in dereferencing an
1672 invalid pointer location. */
1675 tree_could_trap_p (tree expr
)
1677 enum tree_code code
= TREE_CODE (expr
);
1687 t
= get_base_address (expr
);
1688 return !t
|| TREE_CODE (t
) == INDIRECT_REF
;
1691 case TRUNC_DIV_EXPR
:
1693 case FLOOR_DIV_EXPR
:
1694 case ROUND_DIV_EXPR
:
1695 case EXACT_DIV_EXPR
:
1697 case FLOOR_MOD_EXPR
:
1698 case ROUND_MOD_EXPR
:
1699 case TRUNC_MOD_EXPR
:
1711 tree_could_throw_p (tree t
)
1713 if (!flag_exceptions
)
1715 if (TREE_CODE (t
) == MODIFY_EXPR
)
1717 tree sub
= TREE_OPERAND (t
, 1);
1718 if (TREE_CODE (sub
) == CALL_EXPR
)
1722 if (flag_non_call_exceptions
)
1724 if (tree_could_trap_p (sub
))
1726 return tree_could_trap_p (TREE_OPERAND (t
, 0));
1732 if (TREE_CODE (t
) == CALL_EXPR
)
1733 return (call_expr_flags (t
) & ECF_NOTHROW
) == 0;

bool
tree_can_throw_internal (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_external_1 (region_nr);
}

#include "gt-tree-eh.h"