/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "toplev.h"
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
}
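/* Usage note (an observation, not from the original sources): the shift
   by 4 assumes the canonical pointers point at heap objects that are at
   least 16-byte aligned, so the low four bits carry no information and
   are dropped to spread the hash values.  */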
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
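/* Illustrative sketch (not part of the pass itself) of how the table is
   meant to be used: filled while lowering, queried until rtl expansion:

	if (tree_could_throw_p (stmt))
	  add_stmt_to_eh_region (stmt, get_eh_region_number (region));
	...
	int rn = lookup_stmt_eh_region (stmt);	/* -1 if not recorded */

   Only the comparatively few throwing statements occupy table space.  */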
static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}
void
add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  gcc_assert (TREE_CODE (t) != RESX_EXPR);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
						    struct_ptr_eq,
						    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
void
add_stmt_to_eh_region (tree t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}
bool
remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
			 NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}
bool
remove_stmt_from_eh_region (tree t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}
int
lookup_stmt_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
					    &n);

  return (p ? p->region_nr : -1);
}
int
lookup_stmt_eh_region (tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;
  return lookup_stmt_eh_region_fn (cfun, t);
}
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
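/* For example (illustrative only): given

	try { L1: body1; try { L2: body2; } finally { f2; } }
	finally { f1; }

   collect_finally_tree records L1 and the inner TRY_FINALLY_EXPR with
   the outer TRY_FINALLY_EXPR as parent, and L2 with the inner one, so
   that outside_finally_tree can walk from a goto destination up through
   the chain of enclosing try_finally nodes.  */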
static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
	return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);
/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}
/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = (struct goto_queue_node *)
     bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
	      sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
	{
	  tsi_link_before (tsi, t, TSI_SAME_STMT);
	  tsi_delink (tsi);
	  return;
	}
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;

    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;

    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}
/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}
/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}
/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */
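/* For example (illustrative only): in

	try { ... goto out; ... } finally { F }
	out:;

   the goto's destination lies outside the TRY_FINALLY_EXPR, so the goto
   is queued; it will later be redirected into F, with a continuation
   statement that proceeds to "out" afterward.  */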
static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
	tree lab = GOTO_DESTINATION (stmt);

	/* Computed and non-local gotos do not get processed.  Given
	   their nature we can neither tell whether we've escaped the
	   finally block nor redirect them if we knew.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  return;

	/* No need to record gotos that don't leave the try block.  */
	if (! outside_finally_tree (lab, tf->try_finally_expr))
	  return;

	if (! tf->dest_array)
	  {
	    tf->dest_array = VEC_alloc (tree, heap, 10);
	    VEC_quick_push (tree, tf->dest_array, lab);
	    index = 0;
	  }
	else
	  {
	    int n = VEC_length (tree, tf->dest_array);
	    for (index = 0; index < n; ++index)
	      if (VEC_index (tree, tf->dest_array, index) == lab)
		break;
	    if (index == n)
	      VEC_safe_push (tree, heap, tf->dest_array, lab);
	  }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
		       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
	 return value itself is to be computed before the FINALLY block
	 is executed.  e.g.

		int x;
		int foo (void)
		{
		  x = 0;
		  try {
		    return x;
		  } finally {
		    x++;
		  }
		}

	 should return 0, not 1.  Arrange for this to happen by copying
	 the computed return value into a local temporary.  This also
	 allows us to redirect multiple return statements through the
	 same destination block; whether this is a net win or not really
	 depends, I guess, but it does make generation of the switch in
	 lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
	{
	case RESULT_DECL:
	  if (!*return_value_p)
	    *return_value_p = ret_expr;
	  else
	    gcc_assert (*return_value_p == ret_expr);
	  q->cont_stmt = q->stmt;
	  break;

	case GIMPLE_MODIFY_STMT:
	  {
	    tree result = GIMPLE_STMT_OPERAND (ret_expr, 0);
	    tree new, old = GIMPLE_STMT_OPERAND (ret_expr, 1);

	    if (!*return_value_p)
	      {
		if (aggregate_value_p (TREE_TYPE (result),
				       TREE_TYPE (current_function_decl)))
		  /* If this function returns in memory, copy the argument
		     into the return slot now.  Otherwise, we might need to
		     worry about magic return semantics, so we need to use a
		     temporary to hold the value until we're actually ready
		     to return.  */
		  new = result;
		else
		  new = create_tmp_var (TREE_TYPE (old), "rettmp");
		*return_value_p = new;
	      }
	    else
	      new = *return_value_p;

	    x = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (new), new, old);
	    append_to_statement_list (x, &q->repl_stmt);

	    if (new == result)
	      x = result;
	    else
	      x = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (result), result, new);
	    q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* We want to transform
	try { body; } catch { stuff; }
   into
	body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
	over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
	record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}
/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
    E.g. with an inline destructor with an embedded try block.  In this
    case we must save the runtime EH data around the nested exception.

    This complication means that any time the previous runtime data might
    be used (via fallthru from the finally) we handle the eh case here,
    whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build_resx (get_eh_region_number (tf->region));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions,
				&EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
	maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */
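/* Illustrative sketch of the result: since the finally block F never
   falls through, every incoming edge (fallthru, goto, return, or
   exception) can simply be redirected to one label placed between the
   try body and F:

	<try body, with escaping jumps redirected to L>
	L:
	F;
   */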
static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */
static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build_resx (get_eh_region_number (tf->region));

      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
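/* Illustrative shape of the result for two escaping destinations D1 and
   D2, plus a possible exception edge:

	<try body, with jumps to D1/D2 redirected to L1/L2>
	L1: F; goto D1;
	L2: F; goto D2;
	<eh_label>: F; resx;

   that is, one duplicate of the finally block F per destination.  */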
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  if (!labels[index].q)
	    labels[index].q = q;
	}

      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label = create_artificial_label ();

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);

	  x = build1 (LABEL_EXPR, void_type_node, lab);
	  append_to_statement_list (x, &new_stmt);

	  x = lower_try_finally_dup_block (finally, state);
	  lower_eh_constructs_1 (state, &x);
	  append_to_statement_list (x, &new_stmt);

	  append_to_statement_list (q->cont_stmt, &new_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
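/* Illustrative shape of the result, with a single copy of the finally
   block F and a dispatch variable finally_tmp:

	<try body; each escaping edge is rewritten to
	   finally_tmp = <index>; goto finally_label;>
	finally_label:
	F;
	switch (finally_tmp)
	  {
	  case <fallthru_index>: goto <fallthru label>;
	  case <eh_index>: resx;
	  default: goto <recorded destination>;
	  }
   */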
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
			NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, finally_tmp,
		  build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
	{
	  x = build1 (GOTO_EXPR, void_type_node, finally_label);
	  append_to_statement_list (x, tf->top_p);
	}

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
			  build_int_cst (NULL_TREE, fallthru_index), NULL,
			  create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, finally_tmp,
		  build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
			  build_int_cst (NULL_TREE, eh_index), NULL,
			  create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
	{
	  mod = build2 (GIMPLE_MODIFY_STMT, void_type_node, finally_tmp,
			build_int_cst (NULL_TREE, return_index));
	  do_return_redirection (q, finally_label, mod, &return_val);
	  switch_id = return_index;
	}
      else
	{
	  mod = build2 (GIMPLE_MODIFY_STMT, void_type_node, finally_tmp,
			build_int_cst (NULL_TREE, q->index));
	  do_goto_redirection (q, finally_label, mod);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
	TREE_VEC_ELT (case_label_vec, case_index)
	  = build3 (CASE_LABEL_EXPR, void_type_node,
		    build_int_cst (NULL_TREE, switch_id), NULL,
		    /* We store the cont_stmt in the
		       CASE_LABEL, so that we can recover it
		       in the loop below.  We don't create
		       the new label while walking the
		       goto_queue because pointers don't
		       offer a stable order.  */
		    q->cont_stmt);
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build1 (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
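/* Worked example of the estimates below (illustrative numbers): with
   ndests == 3 and a finally block estimated at 10 insns, duplication
   costs roughly (10 + 1) * 3 = 33 insns while the switch machinery
   costs 10 + 2 * 3 = 16, so when optimizing for size the switch form
   is chosen.  */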
static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (ndests == 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */
static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
	   sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
	{
	  if (!out_label)
	    out_label = create_artificial_label ();

	  x = build1 (GOTO_EXPR, void_type_node, out_label);
	  append_to_statement_list (x, &CATCH_BODY (catch));
	}

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
					 EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
	{
	  tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
	  append_to_statement_list (x, tp);
	}
    }
}
1582 lower_eh_constructs_1 (struct leh_state
*state
, tree
*tp
)
1584 tree_stmt_iterator i
;
1587 switch (TREE_CODE (t
))
1590 lower_eh_constructs_1 (state
, &COND_EXPR_THEN (t
));
1591 lower_eh_constructs_1 (state
, &COND_EXPR_ELSE (t
));
1595 /* Look for things that can throw exceptions, and record them. */
1596 if (state
->cur_region
&& tree_could_throw_p (t
))
1598 record_stmt_eh_region (state
->cur_region
, t
);
1599 note_eh_region_may_contain_throw (state
->cur_region
);
1603 case GIMPLE_MODIFY_STMT
:
1604 /* Look for things that can throw exceptions, and record them. */
1605 if (state
->cur_region
&& tree_could_throw_p (t
))
1607 record_stmt_eh_region (state
->cur_region
, t
);
1608 note_eh_region_may_contain_throw (state
->cur_region
);
1614 maybe_record_in_goto_queue (state
, t
);
1617 verify_norecord_switch_expr (state
, t
);
1620 case TRY_FINALLY_EXPR
:
1621 lower_try_finally (state
, tp
);
1624 case TRY_CATCH_EXPR
:
1625 i
= tsi_start (TREE_OPERAND (t
, 1));
1626 switch (TREE_CODE (tsi_stmt (i
)))
1629 lower_catch (state
, tp
);
1631 case EH_FILTER_EXPR
:
1632 lower_eh_filter (state
, tp
);
1635 lower_cleanup (state
, tp
);
1640 case STATEMENT_LIST
:
1641 for (i
= tsi_start (t
); !tsi_end_p (i
); )
1643 lower_eh_constructs_1 (state
, tsi_stmt_ptr (i
));
1645 if (TREE_CODE (t
) == STATEMENT_LIST
)
1647 tsi_link_before (&i
, t
, TSI_SAME_STMT
);
1656 /* A type, a decl, or some kind of statement that we're not
1657 interested in. Don't walk them. */
static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
  return 0;
}
struct tree_opt_pass pass_lower_eh =
{
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}
void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
	return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
static bool mark_eh_edge_found_error;

/* Mark the edge that make_eh_edge would create for the given region by
   setting its aux field; output an error if something goes wrong.  */

static void
mark_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;
  edge e;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  e = find_edge (src, dst);
  if (!e)
    {
      error ("EH edge %i->%i is missing", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (!(e->flags & EDGE_EH))
    {
      error ("EH edge %i->%i miss EH flag", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (e->aux)
    {
      /* ??? might not be mistake.  */
      error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else
    e->aux = (void *) 1;
}
/* Verify that the BB containing STMT as its last statement has precisely
   the edges that make_eh_edges would create.  */

bool
verify_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;
  basic_block bb = bb_for_stmt (stmt);
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_EH)
	      {
		error ("BB %i can not throw but has EH edges", bb->index);
		return true;
	      }
	  return false;
	}
      if (!tree_could_throw_p (stmt))
	{
	  error ("BB %i last statement has incorrectly set region", bb->index);
	  return true;
	}
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if ((e->flags & EDGE_EH) && !e->aux)
	{
	  error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
	  mark_eh_edge_found_error = true;
	  return true;
	}
      e->aux = NULL;
    }
  return mark_eh_edge_found_error;
}
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */
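/* Illustrative examples: "*p" can trap if P is invalid; "x / y" can
   trap when Y is integral and possibly zero, or under -ftrapping-math
   for floating point; "a + b" can trap for floating point with trapping
   math or for signed types whose overflow traps; bitwise operations do
   not trap.  */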
bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
	honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case TARGET_MEM_REF:
      /* For TARGET_MEM_REFs use the information based on the original
	 reference.  */
      expr = TMR_ORIGINAL (expr);
      code = TREE_CODE (expr);
      goto restart;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;

      if (TREE_THIS_NOTRAP (expr))
	return false;

      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;

      if (TREE_THIS_NOTRAP (expr))
	return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
	return true;
      if (fp_operation)
	return flag_trapping_math;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      return false;
    }
}
/* Return true if statement T could throw an exception.  */

bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
    {
      if (flag_non_call_exceptions
	  && tree_could_trap_p (GIMPLE_STMT_OPERAND (t, 0)))
	return true;
      t = GIMPLE_STMT_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
/* Return true if STMT can throw an exception that is caught within
   the current function.  */

bool
tree_can_throw_internal (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr, is_resx);
}
/* Return true if STMT can throw an exception that propagates out of
   the current function.  */

bool
tree_can_throw_external (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return tree_could_throw_p (stmt);
  else
    return can_throw_external_1 (region_nr, is_resx);
}
/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
{
  int region_nr = lookup_stmt_eh_region (old_stmt);

  if (region_nr >= 0)
    {
      bool new_stmt_could_throw = tree_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
	return false;

      remove_stmt_from_eh_region (old_stmt);
      if (new_stmt_could_throw)
	{
	  add_stmt_to_eh_region (new_stmt, region_nr);
	  return false;
	}
      else
	return true;
    }

  return false;
}