/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = a;
  /* The low-order bits of a pointer are mostly zero due to alignment,
     so shift them away before hashing.  */
  return (size_t)*x >> 4;
}
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
struct throw_stmt_node GTY(())
{
  tree stmt;
  int region_nr;
};

static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;
static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  struct throw_stmt_node *n;
  void **slot;

  if (!region)
    return;

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = get_eh_region_number (region);

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
void
add_stmt_to_eh_region (tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = num;

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
bool
remove_stmt_from_eh_region (tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!throw_stmt_table)
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (throw_stmt_table, &dummy, NO_INSERT);
  if (slot)
    {
      htab_clear_slot (throw_stmt_table, slot);
      return true;
    }
  else
    return false;
}
int
lookup_stmt_eh_region (tree t)
{
  struct throw_stmt_node *p, n;

  if (!throw_stmt_table)
    return -2;

  n.stmt = t;
  p = htab_find (throw_stmt_table, &n);

  return (p ? p->region_nr : -1);
}
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = xmalloc (sizeof (*n));
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
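
/* For example, given

	try { ... goto out; } finally { ... }
	out:;

   the label "out" is recorded with the enclosing (here, null) region as
   its parent, so walking up from "out" never reaches the TRY_FINALLY_EXPR
   node and outside_finally_tree returns true: the goto escapes and must
   be routed through the finally block.  */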
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  varray_type dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);
/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}
/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
		 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
	{
	  tsi_link_before (tsi, t, TSI_SAME_STMT);
	  tsi_delink (tsi);
	  return;
	}
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;

    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;

    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}
/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}
/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}
/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
	tree lab = GOTO_DESTINATION (stmt);

	/* Computed and non-local gotos do not get processed.  Given
	   their nature we can neither tell whether we've escaped the
	   finally block nor redirect them if we knew.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  return;

	/* No need to record gotos that don't leave the try block.  */
	if (! outside_finally_tree (lab, tf->try_finally_expr))
	  return;

	if (! tf->dest_array)
	  {
	    VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
	    VARRAY_PUSH_TREE (tf->dest_array, lab);
	    index = 0;
	  }
	else
	  {
	    int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
	    for (index = 0; index < n; ++index)
	      if (VARRAY_TREE (tf->dest_array, index) == lab)
		break;
	    if (index == n)
	      VARRAY_PUSH_TREE (tf->dest_array, lab);
	  }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
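
/* Illustration: for "try { if (p) goto out; return; } finally { F; }"
   two queue entries are created, one for the goto (with index 0, the
   slot of "out" in dest_array) and one for the return (index -1).  Both
   will later be redirected through the finally block.  */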
#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
		       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
	 return value itself is to be computed before the FINALLY block
	 is executed.  e.g.

		int x;
		int foo (void)
		{
		  x = 0;
		  try {
		    return x;
		  } finally {
		    x++;
		  }
		}

	 should return 0, not 1.  Arrange for this to happen by copying
	 the computed return value into a local temporary.  This also
	 allows us to redirect multiple return statements through the
	 same destination block; whether this is a net win or not really
	 depends, I guess, but it does make generation of the switch in
	 lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
	{
	case RESULT_DECL:
	  if (!*return_value_p)
	    *return_value_p = ret_expr;
	  else
	    gcc_assert (*return_value_p == ret_expr);
	  q->cont_stmt = q->stmt;
	  break;

	case MODIFY_EXPR:
	  {
	    tree result = TREE_OPERAND (ret_expr, 0);
	    tree new, old = TREE_OPERAND (ret_expr, 1);

	    if (!*return_value_p)
	      {
		if (aggregate_value_p (TREE_TYPE (result),
				       TREE_TYPE (current_function_decl)))
		  /* If this function returns in memory, copy the argument
		     into the return slot now.  Otherwise, we might need to
		     worry about magic return semantics, so we need to use a
		     temporary to hold the value until we're actually ready
		     to return.  */
		  new = result;
		else
		  new = create_tmp_var (TREE_TYPE (old), "rettmp");
		*return_value_p = new;
	      }
	    else
	      new = *return_value_p;

	    x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
	    append_to_statement_list (x, &q->repl_stmt);

	    if (new == result)
	      q->cont_stmt = q->stmt;
	    else
	      {
		x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
		q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
	      }
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
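
/* Illustration: redirecting "return x;" (a RETURN_EXPR of "result = x")
   leaves in repl_stmt the sequence

	rettmp = x;
	MOD;			(if MOD was given)
	goto finlab;

   while cont_stmt holds "return (result = rettmp);", to be emitted after
   the finally block at the eventual destination.  */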
/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* We want to transform
	try { body; } catch { stuff; }
   to
	body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
	over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
	record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}
/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
    E.g. with an inline destructor with an embedded try block.  In this
    case we must save the runtime EH data around the nested exception.

    This complication means that any time the previous runtime data might
    be used (via fallthru from the finally) we handle the eh case here,
    whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region)));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
	maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
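
/* Illustration: when the finally block may fall through, the EH copy of
   it produced above has the shape

	save_eptr = <<<exception pointer>>>;
	save_filt = <<<filter>>>;
	<finally block>
	<<<exception pointer>>> = save_eptr;
	<<<filter>>> = save_filt;
	resx;

   so that a nested exception raised and caught inside the finally block
   cannot clobber the EH runtime data of the original exception.  */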
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}
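
/* Illustration: when the finally block cannot fall through, e.g. because
   it ends in a call to a noreturn function, every escaping edge is simply
   redirected to the single copy of the finally block:

	<try block>
   lab:
	<finally block that never terminates normally>
*/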
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
	 the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region)));
      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region)));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index;
      tree *labels;

      if (tf->dest_array)
	return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
      else
	return_index = 0;
      labels = xcalloc (sizeof (tree), return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  int index = q->index < 0 ? return_index : q->index;
	  tree lab = labels[index];
	  bool build_p = false;

	  if (!lab)
	    {
	      labels[index] = lab = create_artificial_label ();
	      build_p = true;
	    }

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);

	  if (build_p)
	    {
	      x = build1 (LABEL_EXPR, void_type_node, lab);
	      append_to_statement_list (x, &new_stmt);

	      x = lower_try_finally_dup_block (finally, state);
	      lower_eh_constructs_1 (state, &x);
	      append_to_statement_list (x, &new_stmt);

	      append_to_statement_list (q->cont_stmt, &new_stmt);
	      maybe_record_in_goto_queue (state, q->cont_stmt);
	    }
	}
      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
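
/* Illustration: for "try { A; } finally { F; }" where A may both fall
   through and throw, the copying strategy produces roughly

	A;
	F;			(fallthru copy)
	goto fallthru_label;
   eh_label:
	F;			(EH copy)
	resx;
   fallthru_label:;
*/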
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  if (tf->dest_array)
    nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
  else
    nlabels = 0;
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
		       NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
		 build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
	{
	  x = build1 (GOTO_EXPR, void_type_node, finally_label);
	  append_to_statement_list (x, tf->top_p);
	}

      last_case = build (CASE_LABEL_EXPR, void_type_node,
			 build_int_cst (NULL_TREE, fallthru_index), NULL,
			 create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
		 build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build (CASE_LABEL_EXPR, void_type_node,
			 build_int_cst (NULL_TREE, eh_index), NULL,
			 create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region)));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  last_case_index += nlabels;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
	{
	  mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
		       build_int_cst (NULL_TREE, return_index));
	  do_return_redirection (q, finally_label, mod, &return_val);
	  switch_id = return_index;
	}
      else
	{
	  mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
		       build_int_cst (NULL_TREE, q->index));
	  do_goto_redirection (q, finally_label, mod);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
	{
	  last_case = build (CASE_LABEL_EXPR, void_type_node,
			     build_int_cst (NULL_TREE, switch_id), NULL,
			     create_artificial_label ());
	  TREE_VEC_ELT (case_label_vec, case_index) = last_case;

	  x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
	  append_to_statement_list (x, &switch_body);
	  append_to_statement_list (q->cont_stmt, &switch_body);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}
    }
  replace_goto_queue (tf);
  last_case_index += nlabels;

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
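
/* Illustration: for a try block with a fallthru edge, an EH edge, and one
   escaping "goto out", the switch strategy produces roughly

	<try block, with the goto replaced by
	   finally_tmp = 0; goto finally_label;>
	finally_tmp = 2;
	goto finally_label;
   eh_label:
	finally_tmp = 1;
   finally_label:
	<finally block>
	switch (finally_tmp)
	  {
	  case 0: goto out;
	  case 1: resx;
	  default: goto fallthru_label;	(case 2)
	  }
*/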
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (ndests == 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
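
/* E.g. at -O2, with ndests == 2 and a 15-insn finally block:
   f_estimate = (15 + 1) * 2 = 32 and sw_estimate = 10 + 2 * 2 = 14;
   since 32 < 40, we still choose duplication over the switch.  */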
/* A subroutine of lower_eh_constructs_1.  Lower TRY_FINALLY_EXPR nodes
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
	   sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  if (this_tf.dest_array)
    ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
  else
    ndests = 0;
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
	{
	  if (!out_label)
	    out_label = create_artificial_label ();

	  x = build1 (GOTO_EXPR, void_type_node, out_label);
	  append_to_statement_list (x, &CATCH_BODY (catch));
	}

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
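
/* Illustration: "try { A; } catch (T1) { C1; } catch (T2) { C2; }"
   becomes roughly

	A;
	goto over;
   lab1:
	C1;
	goto over;
   lab2:
	C2;
	goto over;
   over:;

   with lab1 and lab2 registered as the landing pads of the two catch
   regions.  */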
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
					 EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
	{
	  tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
	  append_to_statement_list (x, tp);
	}
    }
}
/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  tree op;

	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);

	  /* ??? For the benefit of calls.c, converting all this to rtl,
	     we need to record the call expression, not just the outer
	     modify statement.  */
	  op = get_call_expr_in (t);
	  if (op)
	    record_stmt_eh_region (state->cur_region, op);
	}
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;

    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
	{
	case CATCH_EXPR:
	  lower_catch (state, tp);
	  break;
	case EH_FILTER_EXPR:
	  lower_eh_filter (state, tp);
	  break;
	default:
	  lower_cleanup (state, tp);
	  break;
	}
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
	{
	  lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
	  t = tsi_stmt (i);
	  if (TREE_CODE (t) == STATEMENT_LIST)
	    {
	      tsi_link_before (&i, t, TSI_SAME_STMT);
	      tsi_delink (&i);
	    }
	  else
	    tsi_next (&i);
	}
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
static void
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq,
				      ggc_free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
}
struct tree_opt_pass pass_lower_eh =
{
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  PROP_gimple_lcf,			/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
	return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base, idx;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
	honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      /* Let us be conservative here for now.  We might be checking bounds of
	 the access similarly to the case below.  */
      if (!TREE_THIS_NOTRAP (expr))
	return true;

      base = TREE_OPERAND (expr, 0);
      return tree_could_trap_p (base);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      idx = TREE_OPERAND (expr, 1);
      if (tree_could_trap_p (base))
	return true;

      if (TREE_THIS_NOTRAP (expr))
	return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
	return true;
      if (fp_operation && flag_trapping_math)
	return true;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      return false;
    }
}
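
/* E.g. "a[i]" with TREE_THIS_NOTRAP unset traps unless the index can be
   shown to be within bounds; "x / y" traps when Y is not a known nonzero
   constant (or when trapping math or trapping overflow applies); and
   "p->f" reduces to the INDIRECT_REF case for "*p".  */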
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
	  && tree_could_trap_p (TREE_OPERAND (t, 0)))
	return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
bool
tree_can_throw_internal (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_external_1 (region_nr);
}
bool
maybe_clean_eh_stmt (tree stmt)
{
  if (!tree_could_throw_p (stmt))
    if (remove_stmt_from_eh_region (stmt))
      return true;
  return false;
}

#include "gt-tree-eh.h"