/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "langhooks.h"
/* In some instances a tree and a gimple need to be stored in the same table,
   i.e. in hash tables.  This is a structure to do this.  */
typedef union {tree *tp; tree t; gimple g;} treemple;
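
/* Editorial illustration, not part of the original source: the same hash
   key can be filled from either representation, e.g. for a hypothetical
   LABEL_DECL "label_decl" or GIMPLE_TRY "try_stmt":

     treemple temp;
     temp.t = label_decl;      keyed on a tree
     temp.g = try_stmt;        keyed on the gimple statement

   Which member is meaningful depends on how the entry was recorded.  */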
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

using_eh_for_cleanups (void)
  using_eh_for_cleanups_p = 1;
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

struct_ptr_eq (const void *a, const void *b)
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;

struct_ptr_hash (const void *a)
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

record_stmt_eh_region (struct eh_region_d *region, gimple t)
  add_stmt_to_eh_region (t, get_eh_region_number (region));
/* Add statement T in function IFUN to EH region NUM.  */

add_stmt_to_eh_region_fn (struct function *ifun, gimple t, int num)
  struct throw_stmt_node *n;

  gcc_assert (num >= 0);
  gcc_assert (gimple_code (t) != GIMPLE_RESX);

  n = GGC_NEW (struct throw_stmt_node);

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
/* Add statement T in the current function (cfun) to EH region number NUM.  */

add_stmt_to_eh_region (gimple t, int num)
  add_stmt_to_eh_region_fn (cfun, t, num);
/* Remove statement T in function IFUN from the EH region holding it.  */

remove_stmt_from_eh_region_fn (struct function *ifun, gimple t)
  struct throw_stmt_node dummy;

  if (!get_eh_throw_stmt_table (ifun))

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,

  htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
/* Remove statement T in the current function (cfun) from the EH
   region holding it.  */

remove_stmt_from_eh_region (gimple t)
  return remove_stmt_from_eh_region_fn (cfun, t);
/* Determine if statement T is inside an EH region in function IFUN.
   Return the EH region number if found, return -2 if IFUN does not
   have an EH table and -1 if T could not be found in IFUN's EH region
   table.  */

lookup_stmt_eh_region_fn (struct function *ifun, gimple t)
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))

  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun), &n);
  return (p ? p->region_nr : -1);
/* Determine if statement T is inside an EH region in the current
   function (cfun).  Return the EH region number if found, return -2
   if cfun does not have an EH table and -1 if T could not be found in
   cfun's EH region table.  */

lookup_stmt_eh_region (gimple t)
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */

  return lookup_stmt_eh_region_fn (cfun, t);
/* Determine if expression T is inside an EH region in the current
   function (cfun).  Return the EH region number if found, return -2
   if cfun does not have an EH table and -1 if T could not be found in
   cfun's EH region table.  */

lookup_expr_eh_region (tree t)
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */

  if (!get_eh_throw_stmt_table (cfun))

  tree_ann_common_t ann = tree_common_ann (t);

  return (int) ann->rn;
/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
record_in_finally_tree (treemple child, gimple parent)
  struct finally_tree_node *n;

  n = XNEW (struct finally_tree_node);

  slot = htab_find_slot (finally_tree, n, INSERT);

collect_finally_tree (gimple stmt, gimple region);
/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

collect_finally_tree_1 (gimple_seq seq, gimple region)
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
collect_finally_tree (gimple stmt, gimple region)
  switch (gimple_code (stmt))
      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);

      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);

      collect_finally_tree_1 (gimple_catch_handler (stmt), region);

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);

      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

outside_finally_tree (treemple start, gimple target)
  struct finally_tree_node n, *p;

  p = (struct finally_tree_node *) htab_find (finally_tree, &n);

  while (start.g != target);
/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */

struct goto_queue_node
  gimple_seq repl_stmt;

  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
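
/* Editorial illustration, not part of the original source: for a fragment
   such as

     try { ...; goto out; ... } finally { cleanup (); }
     out:;

   the GIMPLE_GOTO to "out" escapes the try block, so it gets a goto_queue
   entry: REPL_STMT replaces the goto with a branch into the finally block,
   and CONT_STMT is the goto that resumes at "out" after the cleanup.  */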
/* State of the world while lowering.  */

  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region_d *cur_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;

  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;

  /* While lowering, a top_p is usually expanded into multiple statements,
     so we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region_d *region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
static gimple_seq lower_eh_filter (struct leh_state *, gimple);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
          slot = pointer_map_insert (tf->goto_queue_map,
                                     tf->goto_queue[i].stmt.g);
          gcc_assert (*slot == NULL);
          *slot = &tf->goto_queue[i];

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);

  return (((struct goto_queue_node *) *slot)->repl_stmt);
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)
  new_seq = find_goto_replacement (tf, temp);

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));

  label = create_artificial_label ();
  /* Set the new label for the GIMPLE_COND */
  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);

replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)
  switch (gimple_code (stmt))
      seq = find_goto_replacement (tf, temp);

      gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
      gsi_remove (gsi, false);

      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);

      replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);

      replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);

    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);

      /* These won't have gotos in them.  */
/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)
  gimple_stmt_iterator gsi = gsi_start (seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
/* Replace all goto queue members.  */

replace_goto_queue (struct leh_tf_state *tf)
  if (tf->goto_queue_active == 0)

  replace_goto_queue_stmt_list (tf->top_p_seq, tf);
/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or

record_in_goto_queue (struct leh_tf_state *tf,
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->is_label = is_label;
/* Record the LABEL label in the goto queue contained in TF.

record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
  treemple temp, new_stmt;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)

  /* No need to record gotos that don't leave the try block.  */
  if (!outside_finally_tree (temp, tf->try_finally_expr))

  if (! tf->dest_array)
      tf->dest_array = VEC_alloc (tree, heap, 10);
      VEC_quick_push (tree, tf->dest_array, label);

      int n = VEC_length (tree, tf->dest_array);
      for (index = 0; index < n; ++index)
        if (VEC_index (tree, tf->dest_array, index) == label)
      VEC_safe_push (tree, heap, tf->dest_array, label);

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have an easy access to the then/else

  record_in_goto_queue (tf, new_stmt, index, true);
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
  struct leh_tf_state *tf = state->tf;

  switch (gimple_code (stmt))
      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));

      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));

      tf->may_return = true;
      record_in_goto_queue (tf, new_stmt, -1, false);
#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
  struct leh_tf_state *tf = state->tf;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));

#define verify_norecord_switch_expr(state, switch_expr)
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                       tree *return_value_p)
  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  ret_expr = gimple_return_retval (q->stmt.g);

      if (!*return_value_p)
        *return_value_p = ret_expr;
        gcc_assert (*return_value_p == ret_expr);
      q->cont_stmt = q->stmt.g;
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      if (TREE_CODE (ret_expr) == RESULT_DECL)
          if (!*return_value_p)
            *return_value_p = ret_expr;
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt.g;

      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt.g;

  q->repl_stmt = gimple_seq_alloc ();

    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
/* Similar, but easier, for GIMPLE_GOTO.  */

do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)
  gcc_assert (q->is_label);

  q->repl_stmt = gimple_seq_alloc ();

  q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));

    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
/* We want to transform
        try { body; } catch { stuff; }
   into
        body; goto over; lab: stuff; over:

   TP is a GIMPLE_TRY node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

frob_into_branch_around (gimple tp, tree lab, tree over)
  gimple_seq cleanup, result;

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (gimple_seq_may_fallthru (result))
      over = create_artificial_label ();
      x = gimple_build_goto (over);
      gimple_seq_add_stmt (&result, x);

      x = gimple_build_label (lab);
      gimple_seq_add_stmt (&result, x);

  gimple_seq_add_seq (&result, cleanup);

      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
  gimple region = NULL;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

lower_try_finally_fallthru_label (struct leh_tf_state *tf)
  tree label = tf->fallthru_label;

      label = create_artificial_label ();
      tf->fallthru_label = label;
          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

        if (fintmp == eh_edge)
          protect_cleanup_actions;

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
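
/* Editorial illustration, not part of the original source: with the
   duplication choice, a cleanup such as

     try { body; } finally { destroy (&obj); }

   conceptually gains a second, EH-only copy of the cleanup wrapped so it
   cannot throw:

     normal path:     body; destroy (&obj);
     exception path:  destroy (&obj); resx;    inside a MUST_NOT_THROW filter

   which is what the code below assembles from lower_try_finally_dup_block
   and an EH filter with must_not_throw set.  */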
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
  gimple protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
    protect_cleanup_actions = NULL;

  finally = gimple_try_cleanup (tf->top_p);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form

     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = gimple_seq_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  if (protect_cleanup_actions
      && gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      gsi = gsi_start (finally);
      tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = gimple_build_assign (save_eptr, tmp);
      gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);

      tmp = build0 (FILTER_EXPR, integer_type_node);
      x = gimple_build_assign (save_filt, tmp);
      gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);

      gsi = gsi_last (finally);
      tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = gimple_build_assign (tmp, save_eptr);
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);

      tmp = build0 (FILTER_EXPR, integer_type_node);
      x = gimple_build_assign (tmp, save_filt);
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);

      x = gimple_build_resx (get_eh_region_number (tf->region));
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
      gimple_seq seq = NULL, failure = NULL;

      gimple_seq_add_stmt (&failure, protect_cleanup_actions);
      x = gimple_build_eh_filter (NULL, failure);
      gimple_eh_filter_set_must_not_throw (x, 1);

      gimple_seq_add_stmt (&seq, x);
      x = gimple_build_try (finally, seq, GIMPLE_TRY_CATCH);
      finally = lower_eh_filter (outer_state, x);

    lower_eh_constructs_1 (outer_state, finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  gsi = gsi_last (gimple_try_eval (tf->top_p));

  if (tf->may_fallthru)
      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);

      maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;

  x = gimple_build_label (tf->eh_label);
  gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (&gsi, finally, GSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */
lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)
  tree lab, return_val;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label ();

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  qe = q + tf->goto_queue_active;
      do_return_redirection (q, lab, NULL, &return_val);
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */
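
/* Editorial illustration, not part of the original source: if the single
   destination is the fallthru edge, e.g.

     try { a (); } finally { b (); }

   with no escaping goto/return and no throw, the result is simply the two
   sequences run back to back:

     a (); b ();

   The other one-destination cases below only differ in the label or RESX
   that has to be wrapped around the finally body.  */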
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
  struct goto_queue_node *q, *qe;

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  lower_eh_constructs_1 (state, finally);

      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      x = gimple_build_label (tf->eh_label);
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      gimple_seq_add_seq (&tf->top_p_seq, finally);

      x = gimple_build_resx (get_eh_region_number (tf->region));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

  if (tf->may_fallthru)
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);

  finally_label = create_artificial_label ();
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  qe = q + tf->goto_queue_active;

      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);

      /* Reachable by goto expressions only.  Redirect them.  */
      do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

  if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
      /* Reachable by goto to fallthru label only.  Redirect it
         to the new label (already created, sadly), and do not
         emit the final branch out, or the fallthru label.  */
      tf->fallthru_label = NULL;

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
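
/* Editorial illustration, not part of the original source: with two
   escaping destinations, duplication turns roughly

     try { ... goto A; ... return; ... } finally { fin (); }

   into

     ... goto L_A; ... goto L_ret; ...
     L_A:   fin (); goto A;
     L_ret: fin (); return;

   plus an EH copy ending in a RESX when the region may throw, at the cost
   of one copy of the finally body per destination.  */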
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
  gimple_seq new_stmt;

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  if (tf->may_fallthru)
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&new_stmt, x);

      x = gimple_build_label (tf->eh_label);
      gimple_seq_add_stmt (&new_stmt, x);

      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      x = gimple_build_resx (get_eh_region_number (tf->region));
      gimple_seq_add_stmt (&new_stmt, x);

      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
          struct goto_queue_node *q;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      qe = q + tf->goto_queue_active;
          index = q->index < 0 ? return_index : q->index;
          if (!labels[index].q)
            labels[index].q = q;

      for (index = 0; index < return_index + 1; index++)
          q = labels[index].q;

          lab = labels[index].label = create_artificial_label ();

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);

      for (q = tf->goto_queue; q < qe; q++)
          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
            do_goto_redirection (q, lab, NULL, tf);

      replace_goto_queue (tf);

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
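
/* Editorial illustration, not part of the original source: instead of
   copying the finally body, each incoming edge first sets a selector:

     ... finally_tmp = 0; goto fin; ...      fallthru edge
     ... finally_tmp = 1; goto fin; ...      escaping goto
     fin: fin_body ();
     switch (finally_tmp) { case 0: goto after; case 1: goto A; }

   so only one copy of the finally body is emitted, at the cost of the
   switch machinery built below.  */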
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  VEC (tree,heap) *case_label_vec;
  gimple_seq switch_body;
  struct pointer_map_t *cont_map = NULL;

  switch_body = gimple_seq_alloc ();

  /* Mash the TRY block to the head of the chain.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  /* We use VEC_quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     as we need.  */
  case_label_vec = VEC_alloc (tree, heap, ndests);
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is

  if (tf->may_fallthru)
      x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node,
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, fallthru_index), NULL,
                          create_artificial_label ());
      VEC_quick_push (tree, case_label_vec, last_case);

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&switch_body, x);

      x = gimple_build_label (tf->eh_label);
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node,
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, eh_index), NULL,
                          create_artificial_label ());
      VEC_quick_push (tree, case_label_vec, last_case);

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      x = gimple_build_resx (get_eh_region_number (tf->region));
      gimple_seq_add_stmt (&switch_body, x);

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
      unsigned int case_index;

      mod = gimple_seq_alloc ();

          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node,
          gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;

          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node, q->index));
          gimple_seq_add_stmt (&mod, x);
          do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;

      case_index = j + q->index;
      if (VEC_length (tree, case_label_vec) <= case_index
          || !VEC_index (tree, case_label_vec, case_index))
          case_lab = build3 (CASE_LABEL_EXPR, void_type_node,
                             build_int_cst (NULL_TREE, switch_id), NULL,

          /* We store the cont_stmt in the pointer map, so that we can recover
             it in the loop below.  We don't create the new label while
             walking the goto_queue because pointers don't offer a stable
             order.  */
            cont_map = pointer_map_create ();
          slot = pointer_map_insert (cont_map, case_lab);
          *slot = q->cont_stmt;
          VEC_quick_push (tree, case_label_vec, case_lab);

  for (j = last_case_index; j < last_case_index + nlabels; j++)
      last_case = VEC_index (tree, case_label_vec, j);

      gcc_assert (last_case);
      gcc_assert (cont_map);

      slot = pointer_map_contains (cont_map, last_case);
      /* As the comment above suggests, CASE_LABEL (last_case) was just a
         placeholder, it does not store an actual label, yet.  */
      cont_stmt = *(gimple *) slot;

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = gimple_build_label (label);
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);

    pointer_map_destroy (cont_map);

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
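
/* Editorial worked example, not part of the original source: with the
   weights used below, a finally body whose size estimate is 12 and
   ndests == 3 gives

     f_estimate  = (12 + 1) * 3 = 39
     sw_estimate = 10 + 2 * 3   = 16

   so when optimizing for size (f_estimate < sw_estimate is false) the
   switch form is chosen, while the more permissive thresholds used when
   optimizing for speed can still allow duplication.  */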
decide_copy_try_finally (int ndests, gimple_seq finally)
  int f_estimate, sw_estimate;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */
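
/* Editorial summary, not part of the original source: after the try body
   is lowered, the finally block is emitted according to how many
   destinations can reach it, mirroring the if/else chain at the end of
   this function:

     finally unreachable (ndests == 0)   drop the cleanup entirely
     finally cannot fall through         lower_try_finally_nofallthru
     ndests == 1                         lower_try_finally_onedest
     duplication deemed cheap enough     lower_try_finally_copy
     otherwise                           lower_try_finally_switch  */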
lower_try_finally (struct leh_state *state, gimple tp)
  struct leh_tf_state this_tf;
  struct leh_state this_state;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
      = gen_eh_region_cleanup (state->cur_region);
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed

  if (this_tf.fallthru_label)
      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);

  return this_tf.top_p_seq;
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */
lower_catch (struct leh_state *state, gimple tp)
  struct eh_region_d *try_region;
  struct leh_state this_state;
  gimple_stmt_iterator gsi;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!get_eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  for (gsi = gsi_start (gimple_try_cleanup (tp)); !gsi_end_p (gsi); )
      struct eh_region_d *catch_region;

      gcatch = gsi_stmt (gsi);
      catch_region = gen_eh_region_catch (try_region,
                                          gimple_catch_types (gcatch));

      this_state.cur_region = catch_region;
      lower_eh_constructs_1 (&this_state, gimple_catch_handler (gcatch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = gimple_build_label (eh_label);
      gsi_insert_before (&gsi, x, GSI_SAME_STMT);

      if (gimple_seq_may_fallthru (gimple_catch_handler (gcatch)))
            out_label = create_artificial_label ();

          x = gimple_build_goto (out_label);
          gimple_seq_add_stmt (gimple_catch_handler_ptr (gcatch), x);

      gsi_insert_seq_before (&gsi, gimple_catch_handler (gcatch),

      gsi_remove (&gsi, false);

  return frob_into_branch_around (tp, NULL, out_label);
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

lower_eh_filter (struct leh_state *state, gimple tp)
  struct leh_state this_state;
  struct eh_region_d *this_region;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (gimple_eh_filter_must_not_throw (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
    this_region = gen_eh_region_allowed (state->cur_region,
                                         gimple_eh_filter_types (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!get_eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  lower_eh_constructs_1 (state, gimple_eh_filter_failure (inner));
  gimple_try_set_cleanup (tp, gimple_eh_filter_failure (inner));

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  return frob_into_branch_around (tp, eh_label, NULL);
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */
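
/* Editorial illustration, not part of the original source: given

     try { body; } cleanup { undo (); }

   the lowering below keeps only "body;" when exceptions are disabled or
   the region cannot throw, and otherwise branches around the cleanup so
   that it is reached exclusively through the EH label, roughly

     body; goto over; eh_lab: undo (); over:;  */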
lower_cleanup (struct leh_state *state, gimple tp)
  struct leh_state this_state;
  struct eh_region_d *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
      result = gimple_try_eval (tp);
      lower_eh_constructs_1 (state, result);

  this_region = gen_eh_region_cleanup (state->cur_region);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!get_eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
      result = frob_into_branch_around (tp, fake_tf.eh_label,
                                        fake_tf.fallthru_label);

      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */
      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
          gimple x = gimple_build_label (fake_tf.fallthru_label);
          gimple_seq_add_stmt (&result, x);
/* Main loop for lowering eh constructs.  Also moves gsi to the next

lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
      /* If the stmt can throw use a new temporary for the assignment
         to a LHS.  This makes sure the old value of the LHS is
         available on the EH edge.  */
      if (stmt_could_throw_p (stmt)
          && gimple_has_lhs (stmt)
          && !tree_could_throw_p (gimple_get_lhs (stmt))
          && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
          tree lhs = gimple_get_lhs (stmt);
          tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
          gimple s = gimple_build_assign (lhs, tmp);
          gimple_set_location (s, gimple_location (stmt));
          gimple_set_block (s, gimple_block (stmt));
          gimple_set_lhs (stmt, tmp);
          if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (tmp) = 1;
          gsi_insert_after (gsi, s, GSI_SAME_STMT);

      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (stmt))
          record_stmt_eh_region (state->cur_region, stmt);
          note_eh_region_may_contain_throw (state->cur_region);

      maybe_record_in_goto_queue (state, stmt);

      verify_norecord_switch_expr (state, stmt);

      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        replace = lower_try_finally (state, stmt);

          x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
          switch (gimple_code (x))
              replace = lower_catch (state, stmt);
            case GIMPLE_EH_FILTER:
              replace = lower_eh_filter (state, stmt);
              replace = lower_cleanup (state, stmt);

      /* Remove the old stmt and insert the transformed sequence
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next () */

      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */

lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq)
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi);)
    lower_eh_constructs_2 (state, &gsi);
lower_eh_constructs (void)
  struct leh_state null_state;

  gimple_seq bodyp = gimple_body (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree_1 (bodyp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, bodyp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
struct gimple_opt_pass pass_lower_eh =
  lower_eh_constructs,                  /* execute */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  PROP_gimple_leh,                      /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
/* Construct EH edges for STMT.  */

make_eh_edge (struct eh_region_d *region, void *data)
  basic_block src, dst;

  stmt = (gimple) data;
  lab = get_eh_region_tree_label (region);

  src = gimple_bb (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_EH);
/* See if STMT is a call that might be inlined.  */

inlinable_call_p (gimple stmt)
  if (gimple_code (stmt) != GIMPLE_CALL)
  if (cfun->after_inlining)
  /* Indirect calls can be propagated to direct call
  decl = gimple_call_fndecl (stmt);
  if (cgraph_function_flags_ready
      && cgraph_function_body_availability (cgraph_node (decl))
         < AVAIL_OVERWRITABLE)
  return !DECL_UNINLINABLE (decl);
make_eh_edges (gimple stmt)
  bool inlinable = false;

  if (gimple_code (stmt) == GIMPLE_RESX)
      region_nr = gimple_resx_region (stmt);
      region_nr = lookup_stmt_eh_region (stmt);
      inlinable = inlinable_call_p (stmt);

  foreach_reachable_handler (region_nr, is_resx, inlinable, make_eh_edge, stmt);

  /* Make the CFG profile more consistent by assuming that the exception will
     resume in the first available EH handler.  In practice this makes little
     difference, but we get fewer consistency errors in the dumps.  */
  bb = gimple_bb (stmt);
  if (is_resx && EDGE_COUNT (bb->succs))
    EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE;
/* Redirect EH edge E to NEW_BB.  */

redirect_eh_edge (edge e, basic_block new_bb)
  gimple stmt = gsi_stmt (gsi_last_bb (e->src));
  int region_nr, new_region_nr;
  bool inlinable = false;
  tree label = gimple_block_label (new_bb);
  struct eh_region_d *r;

  if (gimple_code (stmt) == GIMPLE_RESX)
      region_nr = gimple_resx_region (stmt);
      region_nr = lookup_stmt_eh_region (stmt);
      gcc_assert (region_nr >= 0);
      inlinable = inlinable_call_p (stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting EH edge %i->%i to %i, region %i, resx %i\n",
             e->src->index, e->dest->index, new_bb->index, region_nr, is_resx);
  r = redirect_eh_edge_to_label (e, label, is_resx, inlinable, region_nr);
  new_region_nr = get_eh_region_number (r);
  if (new_region_nr != region_nr)
        gimple_resx_set_region (stmt, new_region_nr);
          remove_stmt_from_eh_region (stmt);
          add_stmt_to_eh_region (stmt, new_region_nr);
  e = ssa_redirect_edge (e, new_bb);

static bool mark_eh_edge_found_error;

/* Mark the edge that make_eh_edge would create for the given region by
   setting its aux field; output an error if something goes wrong.  */

mark_eh_edge (struct eh_region_d *region, void *data)
  basic_block src, dst;

  stmt = (gimple) data;
  lab = get_eh_region_tree_label (region);

  src = gimple_bb (stmt);
  dst = label_to_block (lab);

  e = find_edge (src, dst);
      error ("EH edge %i->%i is missing", src->index, dst->index);
      mark_eh_edge_found_error = true;
  else if (!(e->flags & EDGE_EH))
      error ("EH edge %i->%i miss EH flag", src->index, dst->index);
      mark_eh_edge_found_error = true;

      /* ??? might not be a mistake.  */
      error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
      mark_eh_edge_found_error = true;
/* Verify that the BB containing STMT as its last statement has precisely
   the edges that make_eh_edges would create.  */

verify_eh_edges (gimple stmt)
  basic_block bb = gimple_bb (stmt);
  bool inlinable = false;

  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (gimple_code (stmt) == GIMPLE_RESX)
      region_nr = gimple_resx_region (stmt);
      region_nr = lookup_stmt_eh_region (stmt);
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & EDGE_EH)
                error ("BB %i can not throw but has EH edges", bb->index);
      if (!stmt_could_throw_p (stmt))
          error ("BB %i last statement has incorrectly set region", bb->index);
      inlinable = inlinable_call_p (stmt);

  foreach_reachable_handler (region_nr, is_resx, inlinable, mark_eh_edge, stmt);
  FOR_EACH_EDGE (e, ei, bb->succs)
      if ((e->flags & EDGE_EH) && !e->aux)
          error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
          mark_eh_edge_found_error = true;

  return mark_eh_edge_found_error;
/* Helper function for operation_could_trap_p and stmt_could_throw_p.  */

operation_could_trap_helper_p (enum tree_code op,
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (honor_snans || honor_trapv)
      return flag_trapping_math;
      if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))

      /* Some floating point comparisons may trap.  */
    case UNORDERED_EXPR:

    case FIX_TRUNC_EXPR:
      /* Conversion of floating point might trap.  */

      /* These operations don't trap with floating point.  */

      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)

      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
/* Return true if operation OP may trap.  FP_OPERATION is true if OP is applied
   on floating-point values.  HONOR_TRAPV is true if OP is applied on integer
   type operands that may trap.  If OP is a division operator, DIVISOR contains
   the value of the divisor.  */
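
/* Editorial usage sketch, not part of the original source: a caller asking
   whether an integer division may trap would do roughly

     may_trap = operation_could_trap_p (TRUNC_DIV_EXPR,
                                        false,     fp_operation
                                        false,     honor_trapv
                                        divisor);

   which, per the helper above, reports a trap unless the divisor is a
   nonzero constant, and defers to flag_trapping_math for floating-point
   operations.  */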
operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
  bool honor_nans = (fp_operation && flag_trapping_math
                     && !flag_finite_math_only);
  bool honor_snans = fp_operation && flag_signaling_nans != 0;

  if (TREE_CODE_CLASS (op) != tcc_comparison
      && TREE_CODE_CLASS (op) != tcc_unary
      && TREE_CODE_CLASS (op) != tcc_binary)

  return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
                                        honor_nans, honor_snans, divisor,
/* Return true if EXPR can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */
2275 tree_could_trap_p (tree expr
)
2277 enum tree_code code
;
2278 bool fp_operation
= false;
2279 bool honor_trapv
= false;
2280 tree t
, base
, div
= NULL_TREE
;
2285 code
= TREE_CODE (expr
);
2286 t
= TREE_TYPE (expr
);
2290 if (COMPARISON_CLASS_P (expr
))
2291 fp_operation
= FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 0)));
2293 fp_operation
= FLOAT_TYPE_P (t
);
2294 honor_trapv
= INTEGRAL_TYPE_P (t
) && TYPE_OVERFLOW_TRAPS (t
);
2297 if (TREE_CODE_CLASS (code
) == tcc_binary
)
2298 div
= TREE_OPERAND (expr
, 1);
2299 if (operation_could_trap_p (code
, fp_operation
, honor_trapv
, div
))
2305 case TARGET_MEM_REF
:
2306 /* For TARGET_MEM_REFs use the information based on the original
2308 expr
= TMR_ORIGINAL (expr
);
2309 code
= TREE_CODE (expr
);
2316 case VIEW_CONVERT_EXPR
:
2317 case WITH_SIZE_EXPR
:
2318 expr
= TREE_OPERAND (expr
, 0);
2319 code
= TREE_CODE (expr
);
2322 case ARRAY_RANGE_REF
:
2323 base
= TREE_OPERAND (expr
, 0);
2324 if (tree_could_trap_p (base
))
2327 if (TREE_THIS_NOTRAP (expr
))
2330 return !range_in_array_bounds_p (expr
);
2333 base
= TREE_OPERAND (expr
, 0);
2334 if (tree_could_trap_p (base
))
2337 if (TREE_THIS_NOTRAP (expr
))
2340 return !in_array_bounds_p (expr
);
2343 case ALIGN_INDIRECT_REF
:
2344 case MISALIGNED_INDIRECT_REF
:
2345 return !TREE_THIS_NOTRAP (expr
);
2348 return TREE_THIS_VOLATILE (expr
);
2352 t
= get_callee_fndecl (expr
);
2353 /* Assume that calls to weak functions may trap. */
2354 if (!t
|| !DECL_P (t
) || DECL_WEAK (t
))
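
/* A few source-level examples of what the predicate above answers for
   memory references (assuming no earlier pass has set TREE_THIS_NOTRAP and
   the bases are ordinary objects):

     *p       INDIRECT_REF                -> may trap (P may be invalid)
     s.f      COMPONENT_REF of a decl     -> does not trap
     p->f     COMPONENT_REF over *p       -> may trap, because its base does
     a[i]     ARRAY_REF                   -> traps unless I is known in bounds
     weak ()  call to a weak function     -> assumed to possibly trap  */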
/* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be an
   assignment or a conditional) may throw.  */

static bool
stmt_could_throw_1_p (gimple stmt)
{
  enum tree_code code = gimple_expr_code (stmt);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t;
  size_t i;
  bool handled, ret;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = gimple_expr_type (stmt);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
        honor_trapv = true;
    }

  /* Check if the main expression may trap.  */
  t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
  ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
                                       honor_nans, honor_snans, t,
                                       &handled);
  if (handled)
    return ret;

  /* If the expression does not trap, see if any of the individual operands may
     trap.  */
  for (i = 0; i < gimple_num_ops (stmt); i++)
    if (tree_could_trap_p (gimple_op (stmt, i)))
      return true;

  return false;
}
/* Return true if statement STMT could throw an exception.  */

bool
stmt_could_throw_p (gimple stmt)
{
  enum gimple_code code;

  if (!flag_exceptions)
    return false;

  /* The only statements that can throw an exception are assignments,
     conditionals, calls and asms.  */
  code = gimple_code (stmt);
  if (code != GIMPLE_ASSIGN
      && code != GIMPLE_COND
      && code != GIMPLE_CALL
      && code != GIMPLE_ASM)
    return false;

  /* If exceptions can only be thrown by function calls and STMT is not a
     GIMPLE_CALL, the statement cannot throw.  */
  if (!flag_non_call_exceptions && code != GIMPLE_CALL)
    return false;

  if (code == GIMPLE_ASSIGN || code == GIMPLE_COND)
    return stmt_could_throw_1_p (stmt);
  else if (is_gimple_call (stmt))
    return (gimple_call_flags (stmt) & ECF_NOTHROW) == 0;
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return (gimple_asm_volatile_p (stmt));
  else
    gcc_unreachable ();

  return false;
}
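
/* For example, without -fnon-call-exceptions only calls (and volatile asms)
   are considered throwing:

     tmp_1 = foo (x_2);          -- may throw unless FOO is ECF_NOTHROW
     y_3 = *p_4 + 1;             -- cannot throw
     __asm__ __volatile__ (...)  -- treated as possibly throwing

   With -fnon-call-exceptions the assignment is also considered throwing,
   because the dereference of p_4 could trap (see stmt_could_throw_1_p).  */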
/* Return true if expression T could throw an exception.  */

bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (TREE_OPERAND (t, 0)))
        return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
/* Return true if STMT can throw an exception that is not caught within
   the current function (CFUN).  */

bool
stmt_can_throw_external (gimple stmt)
{
  int region_nr;
  bool is_resx = false;
  bool inlinable_call = false;

  if (!stmt_could_throw_p (stmt))
    return false;

  if (gimple_code (stmt) == GIMPLE_RESX)
    {
      region_nr = gimple_resx_region (stmt);
      is_resx = true;
    }
  else
    region_nr = lookup_stmt_eh_region (stmt);

  if (region_nr < 0)
    return true;

  return can_throw_external_1 (region_nr, is_resx, inlinable_call);
}
/* Return true if STMT can throw an exception that is caught within
   the current function (CFUN).  */

bool
stmt_can_throw_internal (gimple stmt)
{
  int region_nr;
  bool is_resx = false;
  bool inlinable_call = false;

  if (gimple_code (stmt) == GIMPLE_RESX)
    {
      region_nr = gimple_resx_region (stmt);
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      inlinable_call = inlinable_call_p (stmt);
    }

  if (region_nr < 0)
    return false;

  return can_throw_internal_1 (region_nr, is_resx, inlinable_call);
}
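
/* The internal/external split matters to callers: a throwing statement with
   a reachable handler in this function needs local EH edges
   (stmt_can_throw_internal), while a statement outside any region, or one
   whose exception escapes every local handler, is what matters when deciding
   whether the function as a whole can throw (stmt_can_throw_external).  A
   single statement can of course be both, when some exceptions are caught
   locally and others propagate out.  */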
/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
{
  int region_nr = lookup_stmt_eh_region (old_stmt);

  if (region_nr >= 0)
    {
      bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
        return false;

      remove_stmt_from_eh_region (old_stmt);
      if (new_stmt_could_throw)
        {
          add_stmt_to_eh_region (new_stmt, region_nr);
          return false;
        }
      else
        return true;
    }

  return false;
}
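
/* A typical caller pattern looks roughly like this (a sketch only; details
   vary between passes, and the purge helper lives in tree-cfg.c):

     gimple old_stmt = gsi_stmt (gsi);
     fold_stmt (&gsi);
     if (maybe_clean_or_replace_eh_stmt (old_stmt, gsi_stmt (gsi)))
       gimple_purge_dead_eh_edges (gimple_bb (gsi_stmt (gsi)));

   i.e. when the replacement can no longer throw, the EH edges out of the
   block have become dead and should be purged.  */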
/* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
   GIMPLE_TRY) that are similar enough to be considered the same.  Currently
   this only handles handlers consisting of a single call, as that's the
   important case for C++: a destructor call for a particular object showing
   up in multiple handlers.  */

static bool
same_handler_p (gimple_seq oneh, gimple_seq twoh)
{
  gimple_stmt_iterator gsi;
  gimple ones, twos;
  unsigned int ai;

  gsi = gsi_start (oneh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  ones = gsi_stmt (gsi);

  gsi = gsi_start (twoh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  twos = gsi_stmt (gsi);

  if (!is_gimple_call (ones)
      || !is_gimple_call (twos)
      || gimple_call_lhs (ones)
      || gimple_call_lhs (twos)
      || gimple_call_chain (ones)
      || gimple_call_chain (twos)
      || !operand_equal_p (gimple_call_fn (ones), gimple_call_fn (twos), 0)
      || gimple_call_num_args (ones) != gimple_call_num_args (twos))
    return false;

  for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
    if (!operand_equal_p (gimple_call_arg (ones, ai),
                          gimple_call_arg (twos, ai), 0))
      return false;

  return true;
}
/* Optimize
    try { A() } finally { try { ~B() } catch { ~A() } }
    try { ... } finally { ~A() }
   to
    try { A() } catch { ~B() }
    try { ~B() ... } finally { ~A() }

   This occurs frequently in C++, where A is a local variable and B is a
   temporary used in the initializer for A.  */

static void
optimize_double_finally (gimple one, gimple two)
{
  gimple oneh;
  gimple_stmt_iterator gsi;

  gsi = gsi_start (gimple_try_cleanup (one));
  if (!gsi_one_before_end_p (gsi))
    return;

  oneh = gsi_stmt (gsi);
  if (gimple_code (oneh) != GIMPLE_TRY
      || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
    return;

  if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
    {
      gimple_seq seq = gimple_try_eval (oneh);

      gimple_try_set_cleanup (one, seq);
      gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
      seq = copy_gimple_seq_and_replace_locals (seq);
      gimple_seq_add_seq (&seq, gimple_try_eval (two));
      gimple_try_set_eval (two, seq);
    }
}
/* Perform EH refactoring optimizations that are simpler to do when code
   flow has been lowered but EH structures haven't.  */

static void
refactor_eh_r (gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  gimple one, two;

  one = NULL;
  two = NULL;
  gsi = gsi_start (seq);
  while (1)
    {
      one = two;
      if (gsi_end_p (gsi))
        two = NULL;
      else
        two = gsi_stmt (gsi);
      if (one
          && two
          && gimple_code (one) == GIMPLE_TRY
          && gimple_code (two) == GIMPLE_TRY
          && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
          && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
        optimize_double_finally (one, two);
      if (one)
        switch (gimple_code (one))
          {
          case GIMPLE_TRY:
            refactor_eh_r (gimple_try_eval (one));
            refactor_eh_r (gimple_try_cleanup (one));
            break;
          case GIMPLE_CATCH:
            refactor_eh_r (gimple_catch_handler (one));
            break;
          case GIMPLE_EH_FILTER:
            refactor_eh_r (gimple_eh_filter_failure (one));
            break;
          default:
            break;
          }
      if (two)
        gsi_next (&gsi);
      else
        break;
    }
}

static unsigned
refactor_eh (void)
{
  refactor_eh_r (gimple_body (current_function_decl));
  return 0;
}
struct gimple_opt_pass pass_refactor_eh =
{
 {
  GIMPLE_PASS,
  "ehopt",                              /* name */
  NULL,                                 /* gate */
  refactor_eh,                          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
/* Walk statements, see which regions are really referenced and remove
   the unreachable ones.  */

static void
tree_remove_unreachable_handlers (void)
{
  sbitmap reachable, contains_stmt;
  VEC(int,heap) *label_to_region;
  basic_block bb;

  label_to_region = label_to_region_map ();
  reachable = sbitmap_alloc (num_eh_regions ());
  sbitmap_zero (reachable);
  contains_stmt = sbitmap_alloc (num_eh_regions ());
  sbitmap_zero (contains_stmt);

  FOR_EACH_BB (bb)
  {
    gimple_stmt_iterator gsi;
    int region;
    bool has_eh_preds = bb_has_eh_pred (bb);

    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);

        if (gimple_code (stmt) == GIMPLE_LABEL && has_eh_preds)
          {
            int uid = LABEL_DECL_UID (gimple_label_label (stmt));
            int region;

            for (region = VEC_index (int, label_to_region, uid);
                 region; region = get_next_region_sharing_label (region))
              SET_BIT (reachable, region);
          }
        if (gimple_code (stmt) == GIMPLE_RESX)
          SET_BIT (reachable,
                   VEC_index (eh_region, cfun->eh->region_array,
                              gimple_resx_region (stmt))->region_number);
        if ((region = lookup_stmt_eh_region (stmt)) >= 0)
          SET_BIT (contains_stmt, region);
      }
  }

  if (dump_file)
    {
      fprintf (dump_file, "Before removal of unreachable regions:\n");
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "Reachable regions: ");
      dump_sbitmap_file (dump_file, reachable);
      fprintf (dump_file, "Regions containing insns: ");
      dump_sbitmap_file (dump_file, contains_stmt);
    }

  remove_unreachable_regions (reachable, contains_stmt);
  sbitmap_free (reachable);
  sbitmap_free (contains_stmt);
  VEC_free (int, heap, label_to_region);
  if (dump_file)
    {
      fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "\n\n");
    }
}
/* Pattern match an empty EH receiver looking like:

     save_filt.6352_662 = [filter_expr] <<<filter object>>>;
     save_eptr.6351_663 = [exc_ptr_expr] <<<exception object>>>;
     <<<exception object>>> = save_eptr.6351_663;
     <<<filter object>>> = save_filt.6352_662;
     resx 1

   and various minor variants after DCE or copy propagation.  On a match,
   return the EH region number of the RESX; otherwise return 0.  */

static int
tree_empty_eh_handler_p (basic_block bb)
{
  gimple_stmt_iterator gsi;
  int region;
  edge_iterator ei;
  edge e;
  use_operand_p imm_use;
  gimple use_stmt;
  bool found = false;

  gsi = gsi_last_bb (bb);

  /* RESX.  */
  if (gsi_end_p (gsi))
    return 0;
  if (gimple_code (gsi_stmt (gsi)) != GIMPLE_RESX)
    return 0;
  region = gimple_resx_region (gsi_stmt (gsi));

  /* filter_object set.  */
  gsi_prev (&gsi);
  if (gsi_end_p (gsi))
    return 0;
  if (gimple_code (gsi_stmt (gsi)) == GIMPLE_ASSIGN)
    {
      tree filter_tmp;
      tree exc_ptr_tmp;

      if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != FILTER_EXPR)
        return 0;
      filter_tmp = gimple_assign_rhs1 (gsi_stmt (gsi));

      /* exception_object set.  */
      gsi_prev (&gsi);
      if (gsi_end_p (gsi))
        return 0;
      if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
        return 0;
      if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != EXC_PTR_EXPR)
        return 0;
      exc_ptr_tmp = gimple_assign_rhs1 (gsi_stmt (gsi));

      /* exception_object get.  */
      if (TREE_CODE (exc_ptr_tmp) != EXC_PTR_EXPR)
        {
          gsi_prev (&gsi);
          if (gsi_end_p (gsi))
            return 0;
          if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
            return 0;
          if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != EXC_PTR_EXPR)
            return 0;
          if (exc_ptr_tmp != gimple_assign_lhs (gsi_stmt (gsi)))
            return 0;
          if (!single_imm_use (exc_ptr_tmp, &imm_use, &use_stmt))
            return 0;
        }

      /* filter_object get.  */
      if (TREE_CODE (filter_tmp) != FILTER_EXPR)
        {
          gsi_prev (&gsi);
          if (gsi_end_p (gsi))
            return 0;
          if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
            return 0;
          if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != FILTER_EXPR)
            return 0;
          if (filter_tmp != gimple_assign_lhs (gsi_stmt (gsi)))
            return 0;
          if (!single_imm_use (filter_tmp, &imm_use, &use_stmt))
            return 0;
        }

      /* Label.  */
      gsi_prev (&gsi);
      if (gsi_end_p (gsi))
        return 0;
    }
  if (gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
    return 0;

  /* Be sure that there is at least one EH region reaching the block directly.
     After EH edge redirection, it is possible that the block is reached by
     one handler but resumed by a different one.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    if ((e->flags & EDGE_EH))
      found = true;
  if (found)
    return region;
  return 0;
}
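
/* Such empty receivers typically remain after the cleanup body itself has
   been optimized away (for instance, a destructor call that was inlined and
   reduced to nothing): the landing pad then merely restores the exception
   pointer and filter value and immediately resumes, so cleanup_empty_eh
   below can bypass the whole region.  */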
/* Return true if it is possible to remove basic block BB and propagate
   through PHIs.

   This means that every PHI in BB has all uses such that they are PHIs
   of basic blocks reachable through BB, and they appear only in uses
   reachable by the edge from BB to the block containing the use.

   This is the same as in the merge-phi code, but in a slightly more general
   setting, because BB can have multiple successors.  */

static bool
all_phis_safe_to_merge (basic_block bb)
{
  gimple_stmt_iterator si;
  bool ok = true;

  for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple phi = gsi_stmt (si);
      tree result = gimple_phi_result (phi);
      gimple stmt;
      use_operand_p imm_use;
      imm_use_iterator imm_iter;

      /* If the PHI's result is never used, then we can just
         ignore it.  */
      if (has_zero_uses (result))
        continue;
      /* We can always rebuild virtuals if needed.  */
      if (!is_gimple_reg (result))
        continue;
      FOR_EACH_IMM_USE_STMT (stmt, imm_iter, result)
        {
          if (gimple_code (stmt) != GIMPLE_PHI)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "PHI result has use in non-PHI statement.\n");
              ok = false;
              BREAK_FROM_IMM_USE_STMT (imm_iter);
            }
          else
            FOR_EACH_IMM_USE_ON_STMT (imm_use, imm_iter)
              {
                edge e;
                e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (imm_use));
                if (e->src != bb)
                  {
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "PHI has use in PHI not reached "
                               "from empty cleanup itself.\n");
                    ok = false;
                    break;
                  }
              }
          if (!ok)
            BREAK_FROM_IMM_USE_STMT (imm_iter);
        }
      if (!ok)
        return false;
    }
  return true;
}
static bool dominance_info_invalidated;

/* Information to pass into make_eh_edge_and_update_phi.  */

struct update_info
{
  basic_block bb_to_remove, bb;
  edge edge_to_remove;
};

/* DATA points to an update_info structure.
   Like make_eh_edge, create an EH edge from DATA->bb to the basic block
   containing the handler of REGION.  In addition, update PHI operands by
   copying operands from DATA->bb_to_remove.  */

static void
make_eh_edge_and_update_phi (struct eh_region_d *region, void *data)
{
  struct update_info *info = (struct update_info *) data;
  edge e, e2;
  tree lab;
  basic_block src, dst;
  gimple_stmt_iterator si;

  lab = get_eh_region_tree_label (region);

  src = info->bb;
  dst = label_to_block (lab);

  e = find_edge (src, dst);
  if (e)
    {
      gcc_assert (e->flags & EDGE_EH);
      e->aux = e;
      return;
    }
  dominance_info_invalidated = true;
  e2 = find_edge (info->bb_to_remove, dst);
  e = make_edge (src, dst, EDGE_EH);
  e->aux = e;
  gcc_assert (e2);

  for (si = gsi_start_phis (dst); !gsi_end_p (si); gsi_next (&si))
    {
      gimple phi = gsi_stmt (si);
      tree use = USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e2));
      gimple def = (TREE_CODE (use) == SSA_NAME
                    ? SSA_NAME_DEF_STMT (use) : NULL);

      if (def && gimple_bb (def) == info->bb_to_remove)
        {
          use = USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (def,
                                                         info->edge_to_remove));
          gcc_assert (info->bb_to_remove == info->edge_to_remove->dest);
          def = TREE_CODE (use) == SSA_NAME ? SSA_NAME_DEF_STMT (use) : NULL;
          gcc_assert (!def
                      || gimple_bb (def) != info->bb_to_remove
                      || !is_gimple_reg (use));
        }
      SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), use);
    }
}
/* Make EH edges corresponding to STMT while updating PHI nodes after removal
   of the empty cleanup BB_TO_REMOVE joined to the BB containing STMT by
   EDGE_TO_REMOVE.

   Return true if EDGE_TO_REMOVE was really removed.  It might stay reachable
   when not all EH regions are cleaned up.  */

static bool
update_eh_edges (gimple stmt, basic_block bb_to_remove, edge edge_to_remove)
{
  int region_nr;
  bool is_resx;
  bool inlinable = false;
  struct update_info info;
  edge_iterator ei;
  edge e;
  int probability_sum = 0;
  bool removed = false;

  info.bb_to_remove = bb_to_remove;
  info.bb = gimple_bb (stmt);
  info.edge_to_remove = edge_to_remove;

  if (gimple_code (stmt) == GIMPLE_RESX)
    {
      region_nr = gimple_resx_region (stmt);
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      is_resx = false;
      inlinable = inlinable_call_p (stmt);
    }

  /* First add new edges as necessary.  */
  foreach_reachable_handler (region_nr, is_resx, inlinable,
                             make_eh_edge_and_update_phi, &info);

  /* And remove the edges we did not mark.  */
  for (ei = ei_start (info.bb->succs); (e = ei_safe_edge (ei)); )
    {
      if ((e->flags & EDGE_EH) && !e->aux)
        {
          dominance_info_invalidated = true;
          if (e == edge_to_remove)
            removed = true;
          remove_edge (e);
        }
      else
        {
          e->aux = NULL;
          probability_sum += e->probability;
          ei_next (&ei);
        }
    }

  /* Make the CFG profile more consistent assuming that the exception will
     resume to the first available EH handler.  In practice this makes little
     difference, but we get fewer consistency errors in the dumps.  */
  if (is_resx && EDGE_COUNT (info.bb->succs) && !probability_sum)
    EDGE_SUCC (info.bb, 0)->probability = REG_BR_PROB_BASE;

  return removed;
}
/* Look for basic blocks containing an empty exception handler and remove
   them.  This is similar to jump forwarding, just across EH edges.  */

static bool
cleanup_empty_eh (basic_block bb, VEC(int,heap) *label_to_region)
{
  int region;
  gimple_stmt_iterator si;
  edge_iterator ei;
  edge e;

  /* When the handler of an EH region winds up being empty, we can safely
     remove it.  This leads to inner EH regions being redirected to the
     outer one, if present in the function.  So we need to rebuild EH edges
     in all sources.  */
  if ((region = tree_empty_eh_handler_p (bb))
      && all_phis_safe_to_merge (bb))
    {
      bool found = false, removed_some = false, has_non_eh_preds = false;
      gimple_stmt_iterator gsi;

      /* Look for all EH regions sharing a label of this block.
         If they are not the same as REGION, remove them and replace them
         by the outer region of REGION.  Also note if REGION itself is one
         of them.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
          {
            int uid = LABEL_DECL_UID (gimple_label_label (gsi_stmt (gsi)));
            int r = VEC_index (int, label_to_region, uid);
            int next;

            while (r)
              {
                next = get_next_region_sharing_label (r);
                if (r == region)
                  found = true;
                else
                  {
                    removed_some = true;
                    remove_eh_region_and_replace_by_outer_of (r, region);
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "Empty EH handler %i removed and "
                               "replaced by %i\n", r, region);
                  }
                r = next;
              }
          }
        else
          break;

      gcc_assert (found || removed_some);
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!(e->flags & EDGE_EH))
          has_non_eh_preds = true;

      /* When the block is an empty EH cleanup, but it is reachable via
         non-EH code too, we cannot remove the region it resumes via,
         because doing so would lead to redirection of its RESX edges.

         This case will be handled later, after edge forwarding, if the EH
         cleanup is really dead.  */
      if (found && !has_non_eh_preds)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Empty EH handler %i removed.\n", region);
          remove_eh_region (region);
        }
      else if (!removed_some)
        return false;

      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
        {
          basic_block src = e->src;
          if (!(e->flags & EDGE_EH))
            {
              ei_next (&ei);
              continue;
            }
          if (stmt_can_throw_internal (last_stmt (src)))
            {
              if (!update_eh_edges (last_stmt (src), bb, e))
                ei_next (&ei);
            }
          else
            remove_edge (e);
        }

      /* Verify that we eliminated all uses of the PHIs we are going to
         remove.  If we did not, rebuild SSA on the affected variable
         (this is allowed only for virtuals).  */
      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        {
          gimple phi = gsi_stmt (si);
          tree result = gimple_phi_result (phi);
          if (!has_zero_uses (result))
            {
              use_operand_p use_p;
              imm_use_iterator iter;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, result)
                {
                  /* We have a use; see if it won't disappear after
                     removing BB.  */
                  if (gimple_bb (stmt) == bb)
                    continue;
                  if (gimple_code (stmt) == GIMPLE_PHI)
                    {
                      bool bad = false;

                      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                        if (gimple_phi_arg_edge (stmt,
                            PHI_ARG_INDEX_FROM_USE (use_p))->src != bb)
                          bad = true;

                      if (!bad)
                        continue;
                    }

                  gcc_assert (!is_gimple_reg (result));
                  mark_sym_for_renaming (SSA_NAME_VAR (result));
                  /* As we are going to delete this block we will release all
                     defs which makes the immediate uses on use stmts invalid.
                     Avoid that by replacing all uses with the bare variable
                     and updating the stmts.  */
                  FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                    SET_USE (use_p, SSA_NAME_VAR (result));
                  update_stmt (stmt);
                }
            }
        }
      if (!ei_safe_edge (ei_start (bb->preds)))
        delete_basic_block (bb);
      return true;
    }
  return false;
}
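
/* Conceptually the transformation above mirrors forwarding a jump through
   an empty block: when BB only restores the EH state and resumes, every
   thrower that used to land in BB is rewired (via update_eh_edges) directly
   to the handlers BB would resume to, PHI arguments are carried along, and
   BB together with its now-unreferenced region is deleted.  */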
/* Perform cleanups and lowering of exception handling:
    1) cleanup regions whose handlers do nothing are optimized out;
    2) MUST_NOT_THROW regions that became dead because of 1) are optimized out;
    3) info about regions containing instructions, and regions reachable via
       local EH edges, is collected;
    4) the EH tree is pruned of regions that are no longer necessary.  */

static unsigned int
cleanup_eh (void)
{
  bool changed = false;
  basic_block bb;
  VEC(int,heap) *label_to_region;
  int i;

  if (!cfun->eh)
    return 0;
  if (dump_file)
    {
      fprintf (dump_file, "Before cleanups:\n");
      dump_eh_tree (dump_file, cfun);
    }

  if (optimize)
    {
      label_to_region = label_to_region_map ();
      dominance_info_invalidated = false;
      /* We cannot use FOR_EACH_BB, since the basic blocks may get removed.  */
      for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
        {
          bb = BASIC_BLOCK (i);
          if (bb)
            changed |= cleanup_empty_eh (bb, label_to_region);
        }
      VEC_free (int, heap, label_to_region);
      if (dominance_info_invalidated)
        {
          free_dominance_info (CDI_DOMINATORS);
          free_dominance_info (CDI_POST_DOMINATORS);
        }

      /* Removing contained cleanups can render MUST_NOT_THROW regions empty.  */
      if (changed)
        delete_unreachable_blocks ();
    }

  tree_remove_unreachable_handlers ();
  if (dump_file)
    {
      fprintf (dump_file, "After cleanups:\n");
      dump_eh_tree (dump_file, cfun);
    }

  return (changed ? TODO_cleanup_cfg | TODO_update_ssa : 0);
}
struct gimple_opt_pass pass_cleanup_eh = {
 {
  GIMPLE_PASS,
  "ehcleanup",                          /* name */
  NULL,                                 /* gate */
  cleanup_eh,                           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};