1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
23 #include "coretypes.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
37 #include "langhooks.h"
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

/* Record that this translation unit uses the EH machinery to implement
   cleanup actions (e.g. destructors run during stack unwinding).  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
51 /* Misc functions used in this file. */
/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  /* Two entries are equal iff their leading pointers are identical.  */
  return *x == *y;
}
66 struct_ptr_hash (const void *a
)
68 const void * const * x
= a
;
69 return (size_t)*x
>> 4;
73 /* Remember and lookup EH region data for arbitrary statements.
74 Really this means any statement that could_throw_p. We could
75 stuff this information into the stmt_ann data structure, but:
77 (1) We absolutely rely on this information being kept until
78 we get to rtl. Once we're done with lowering here, if we lose
79 the information there's no way to recover it!
81 (2) There are many more statements that *cannot* throw as
82 compared to those that can. We should be saving some amount
83 of space by only allocating memory for those that can throw. */
86 record_stmt_eh_region (struct eh_region
*region
, tree t
)
91 add_stmt_to_eh_region (t
, get_eh_region_number (region
));
95 add_stmt_to_eh_region_fn (struct function
*ifun
, tree t
, int num
)
97 struct throw_stmt_node
*n
;
100 gcc_assert (num
>= 0);
101 gcc_assert (TREE_CODE (t
) != RESX_EXPR
);
103 n
= ggc_alloc (sizeof (*n
));
107 if (!get_eh_throw_stmt_table (ifun
))
108 set_eh_throw_stmt_table (ifun
, htab_create_ggc (31, struct_ptr_hash
,
112 slot
= htab_find_slot (get_eh_throw_stmt_table (ifun
), n
, INSERT
);
115 /* ??? For the benefit of calls.c, converting all this to rtl,
116 we need to record the call expression, not just the outer
118 if (TREE_CODE (t
) == MODIFY_EXPR
119 && (t
= get_call_expr_in (t
)))
120 add_stmt_to_eh_region_fn (ifun
, t
, num
);
124 add_stmt_to_eh_region (tree t
, int num
)
126 add_stmt_to_eh_region_fn (cfun
, t
, num
);
130 remove_stmt_from_eh_region_fn (struct function
*ifun
, tree t
)
132 struct throw_stmt_node dummy
;
135 if (!get_eh_throw_stmt_table (ifun
))
139 slot
= htab_find_slot (get_eh_throw_stmt_table (ifun
), &dummy
,
143 htab_clear_slot (get_eh_throw_stmt_table (ifun
), slot
);
144 /* ??? For the benefit of calls.c, converting all this to rtl,
145 we need to record the call expression, not just the outer
147 if (TREE_CODE (t
) == MODIFY_EXPR
148 && (t
= get_call_expr_in (t
)))
149 remove_stmt_from_eh_region_fn (ifun
, t
);
157 remove_stmt_from_eh_region (tree t
)
159 return remove_stmt_from_eh_region_fn (cfun
, t
);
163 lookup_stmt_eh_region_fn (struct function
*ifun
, tree t
)
165 struct throw_stmt_node
*p
, n
;
167 if (!get_eh_throw_stmt_table (ifun
))
171 p
= htab_find (get_eh_throw_stmt_table (ifun
), &n
);
173 return (p
? p
->region_nr
: -1);
177 lookup_stmt_eh_region (tree t
)
179 /* We can get called from initialized data when -fnon-call-exceptions
180 is on; prevent crash. */
183 return lookup_stmt_eh_region_fn (cfun
, t
);
187 /* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
188 nodes and LABEL_DECL nodes. We will use this during the second phase to
189 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
191 struct finally_tree_node
196 /* Note that this table is *not* marked GTY. It is short-lived. */
197 static htab_t finally_tree
;
200 record_in_finally_tree (tree child
, tree parent
)
202 struct finally_tree_node
*n
;
205 n
= xmalloc (sizeof (*n
));
209 slot
= htab_find_slot (finally_tree
, n
, INSERT
);
215 collect_finally_tree (tree t
, tree region
)
218 switch (TREE_CODE (t
))
221 record_in_finally_tree (LABEL_EXPR_LABEL (t
), region
);
224 case TRY_FINALLY_EXPR
:
225 record_in_finally_tree (t
, region
);
226 collect_finally_tree (TREE_OPERAND (t
, 0), t
);
227 t
= TREE_OPERAND (t
, 1);
231 collect_finally_tree (TREE_OPERAND (t
, 0), region
);
232 t
= TREE_OPERAND (t
, 1);
240 t
= EH_FILTER_FAILURE (t
);
245 tree_stmt_iterator i
;
246 for (i
= tsi_start (t
); !tsi_end_p (i
); tsi_next (&i
))
247 collect_finally_tree (tsi_stmt (i
), region
);
252 /* A type, a decl, or some kind of statement that we're not
253 interested in. Don't walk them. */
258 /* Use the finally tree to determine if a jump from START to TARGET
259 would leave the try_finally node that START lives in. */
262 outside_finally_tree (tree start
, tree target
)
264 struct finally_tree_node n
, *p
;
269 p
= htab_find (finally_tree
, &n
);
274 while (start
!= target
);
279 /* Second pass of EH node decomposition. Actually transform the TRY_FINALLY
280 and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
281 The eh region creation is straight-forward, but frobbing all the gotos
282 and such into shape isn't. */
284 /* State of the world while lowering. */
288 /* What's "current" while constructing the eh region tree. These
289 correspond to variables of the same name in cfun->eh, which we
290 don't have easy access to. */
291 struct eh_region
*cur_region
;
292 struct eh_region
*prev_try
;
294 /* Processing of TRY_FINALLY requires a bit more state. This is
295 split out into a separate structure so that we don't have to
296 copy so much when processing other nodes. */
297 struct leh_tf_state
*tf
;
302 /* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
303 is the original TRY_FINALLY_EXPR. We need to retain this so that
304 outside_finally_tree can reliably reference the tree used in the
305 collect_finally_tree data structures. */
306 tree try_finally_expr
;
309 /* The state outside this try_finally node. */
310 struct leh_state
*outer
;
312 /* The exception region created for it. */
313 struct eh_region
*region
;
315 /* The GOTO_QUEUE is is an array of GOTO_EXPR and RETURN_EXPR statements
316 that are seen to escape this TRY_FINALLY_EXPR node. */
317 struct goto_queue_node
{
323 size_t goto_queue_size
;
324 size_t goto_queue_active
;
326 /* The set of unique labels seen as entries in the goto queue. */
327 VEC(tree
,heap
) *dest_array
;
329 /* A label to be added at the end of the completed transformed
330 sequence. It will be set if may_fallthru was true *at one time*,
331 though subsequent transformations may have cleared that flag. */
334 /* A label that has been registered with except.c to be the
335 landing pad for this try block. */
338 /* True if it is possible to fall out the bottom of the try block.
339 Cleared if the fallthru is converted to a goto. */
342 /* True if any entry in goto_queue is a RETURN_EXPR. */
345 /* True if the finally block can receive an exception edge.
346 Cleared if the exception case is handled by code duplication. */
350 static void lower_eh_filter (struct leh_state
*, tree
*);
351 static void lower_eh_constructs_1 (struct leh_state
*, tree
*);
353 /* Comparison function for qsort/bsearch. We're interested in
354 searching goto queue elements for source statements. */
357 goto_queue_cmp (const void *x
, const void *y
)
359 tree a
= ((const struct goto_queue_node
*)x
)->stmt
;
360 tree b
= ((const struct goto_queue_node
*)y
)->stmt
;
361 return (a
== b
? 0 : a
< b
? -1 : 1);
364 /* Search for STMT in the goto queue. Return the replacement,
365 or null if the statement isn't in the queue. */
368 find_goto_replacement (struct leh_tf_state
*tf
, tree stmt
)
370 struct goto_queue_node tmp
, *ret
;
372 ret
= bsearch (&tmp
, tf
->goto_queue
, tf
->goto_queue_active
,
373 sizeof (struct goto_queue_node
), goto_queue_cmp
);
374 return (ret
? ret
->repl_stmt
: NULL
);
377 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
378 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
379 then we can just splat it in, otherwise we add the new stmts immediately
380 after the COND_EXPR and redirect. */
383 replace_goto_queue_cond_clause (tree
*tp
, struct leh_tf_state
*tf
,
384 tree_stmt_iterator
*tsi
)
386 tree
new, one
, label
;
388 new = find_goto_replacement (tf
, *tp
);
392 one
= expr_only (new);
393 if (one
&& TREE_CODE (one
) == GOTO_EXPR
)
399 label
= build1 (LABEL_EXPR
, void_type_node
, NULL_TREE
);
400 *tp
= build_and_jump (&LABEL_EXPR_LABEL (label
));
402 tsi_link_after (tsi
, label
, TSI_CONTINUE_LINKING
);
403 tsi_link_after (tsi
, new, TSI_CONTINUE_LINKING
);
406 /* The real work of replace_goto_queue. Returns with TSI updated to
407 point to the next statement. */
409 static void replace_goto_queue_stmt_list (tree
, struct leh_tf_state
*);
412 replace_goto_queue_1 (tree t
, struct leh_tf_state
*tf
, tree_stmt_iterator
*tsi
)
414 switch (TREE_CODE (t
))
418 t
= find_goto_replacement (tf
, t
);
421 tsi_link_before (tsi
, t
, TSI_SAME_STMT
);
428 replace_goto_queue_cond_clause (&COND_EXPR_THEN (t
), tf
, tsi
);
429 replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t
), tf
, tsi
);
432 case TRY_FINALLY_EXPR
:
434 replace_goto_queue_stmt_list (TREE_OPERAND (t
, 0), tf
);
435 replace_goto_queue_stmt_list (TREE_OPERAND (t
, 1), tf
);
438 replace_goto_queue_stmt_list (CATCH_BODY (t
), tf
);
441 replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t
), tf
);
448 /* These won't have gotos in them. */
455 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
458 replace_goto_queue_stmt_list (tree t
, struct leh_tf_state
*tf
)
460 tree_stmt_iterator i
= tsi_start (t
);
461 while (!tsi_end_p (i
))
462 replace_goto_queue_1 (tsi_stmt (i
), tf
, &i
);
465 /* Replace all goto queue members. */
468 replace_goto_queue (struct leh_tf_state
*tf
)
470 if (tf
->goto_queue_active
== 0)
472 replace_goto_queue_stmt_list (*tf
->top_p
, tf
);
475 /* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
476 node, and if so record that fact in the goto queue associated with that
480 maybe_record_in_goto_queue (struct leh_state
*state
, tree stmt
)
482 struct leh_tf_state
*tf
= state
->tf
;
483 struct goto_queue_node
*q
;
490 switch (TREE_CODE (stmt
))
494 tree lab
= GOTO_DESTINATION (stmt
);
496 /* Computed and non-local gotos do not get processed. Given
497 their nature we can neither tell whether we've escaped the
498 finally block nor redirect them if we knew. */
499 if (TREE_CODE (lab
) != LABEL_DECL
)
502 /* No need to record gotos that don't leave the try block. */
503 if (! outside_finally_tree (lab
, tf
->try_finally_expr
))
506 if (! tf
->dest_array
)
508 tf
->dest_array
= VEC_alloc (tree
, heap
, 10);
509 VEC_quick_push (tree
, tf
->dest_array
, lab
);
514 int n
= VEC_length (tree
, tf
->dest_array
);
515 for (index
= 0; index
< n
; ++index
)
516 if (VEC_index (tree
, tf
->dest_array
, index
) == lab
)
519 VEC_safe_push (tree
, heap
, tf
->dest_array
, lab
);
525 tf
->may_return
= true;
533 active
= tf
->goto_queue_active
;
534 size
= tf
->goto_queue_size
;
537 size
= (size
? size
* 2 : 32);
538 tf
->goto_queue_size
= size
;
540 = xrealloc (tf
->goto_queue
, size
* sizeof (struct goto_queue_node
));
543 q
= &tf
->goto_queue
[active
];
544 tf
->goto_queue_active
= active
+ 1;
546 memset (q
, 0, sizeof (*q
));
551 #ifdef ENABLE_CHECKING
552 /* We do not process SWITCH_EXPRs for now. As long as the original source
553 was in fact structured, and we've not yet done jump threading, then none
554 of the labels will leave outer TRY_FINALLY_EXPRs. Verify this. */
557 verify_norecord_switch_expr (struct leh_state
*state
, tree switch_expr
)
559 struct leh_tf_state
*tf
= state
->tf
;
566 vec
= SWITCH_LABELS (switch_expr
);
567 n
= TREE_VEC_LENGTH (vec
);
569 for (i
= 0; i
< n
; ++i
)
571 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
572 gcc_assert (!outside_finally_tree (lab
, tf
->try_finally_expr
));
576 #define verify_norecord_switch_expr(state, switch_expr)
579 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
580 whatever is needed to finish the return. If MOD is non-null, insert it
581 before the new branch. RETURN_VALUE_P is a cache containing a temporary
582 variable to be used in manipulating the value returned from the function. */
585 do_return_redirection (struct goto_queue_node
*q
, tree finlab
, tree mod
,
586 tree
*return_value_p
)
588 tree ret_expr
= TREE_OPERAND (q
->stmt
, 0);
593 /* The nasty part about redirecting the return value is that the
594 return value itself is to be computed before the FINALLY block
608 should return 0, not 1. Arrange for this to happen by copying
609 computed the return value into a local temporary. This also
610 allows us to redirect multiple return statements through the
611 same destination block; whether this is a net win or not really
612 depends, I guess, but it does make generation of the switch in
613 lower_try_finally_switch easier. */
615 switch (TREE_CODE (ret_expr
))
618 if (!*return_value_p
)
619 *return_value_p
= ret_expr
;
621 gcc_assert (*return_value_p
== ret_expr
);
622 q
->cont_stmt
= q
->stmt
;
627 tree result
= TREE_OPERAND (ret_expr
, 0);
628 tree
new, old
= TREE_OPERAND (ret_expr
, 1);
630 if (!*return_value_p
)
632 if (aggregate_value_p (TREE_TYPE (result
),
633 TREE_TYPE (current_function_decl
)))
634 /* If this function returns in memory, copy the argument
635 into the return slot now. Otherwise, we might need to
636 worry about magic return semantics, so we need to use a
637 temporary to hold the value until we're actually ready
641 new = create_tmp_var (TREE_TYPE (old
), "rettmp");
642 *return_value_p
= new;
645 new = *return_value_p
;
647 x
= build (MODIFY_EXPR
, TREE_TYPE (new), new, old
);
648 append_to_statement_list (x
, &q
->repl_stmt
);
653 x
= build (MODIFY_EXPR
, TREE_TYPE (result
), result
, new);
654 q
->cont_stmt
= build1 (RETURN_EXPR
, void_type_node
, x
);
663 /* If we don't return a value, all return statements are the same. */
664 q
->cont_stmt
= q
->stmt
;
668 append_to_statement_list (mod
, &q
->repl_stmt
);
670 x
= build1 (GOTO_EXPR
, void_type_node
, finlab
);
671 append_to_statement_list (x
, &q
->repl_stmt
);
674 /* Similar, but easier, for GOTO_EXPR. */
677 do_goto_redirection (struct goto_queue_node
*q
, tree finlab
, tree mod
)
681 q
->cont_stmt
= q
->stmt
;
683 append_to_statement_list (mod
, &q
->repl_stmt
);
685 x
= build1 (GOTO_EXPR
, void_type_node
, finlab
);
686 append_to_statement_list (x
, &q
->repl_stmt
);
689 /* We want to transform
690 try { body; } catch { stuff; }
692 body; goto over; lab: stuff; over:
694 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
695 should be placed before the second operand, or NULL. OVER is
696 an existing label that should be put at the exit, or NULL. */
699 frob_into_branch_around (tree
*tp
, tree lab
, tree over
)
703 op1
= TREE_OPERAND (*tp
, 1);
704 *tp
= TREE_OPERAND (*tp
, 0);
706 if (block_may_fallthru (*tp
))
709 over
= create_artificial_label ();
710 x
= build1 (GOTO_EXPR
, void_type_node
, over
);
711 append_to_statement_list (x
, tp
);
716 x
= build1 (LABEL_EXPR
, void_type_node
, lab
);
717 append_to_statement_list (x
, tp
);
720 append_to_statement_list (op1
, tp
);
724 x
= build1 (LABEL_EXPR
, void_type_node
, over
);
725 append_to_statement_list (x
, tp
);
729 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
730 Make sure to record all new labels found. */
733 lower_try_finally_dup_block (tree t
, struct leh_state
*outer_state
)
737 t
= unsave_expr_now (t
);
740 region
= outer_state
->tf
->try_finally_expr
;
741 collect_finally_tree (t
, region
);
746 /* A subroutine of lower_try_finally. Create a fallthru label for
747 the given try_finally state. The only tricky bit here is that
748 we have to make sure to record the label in our outer context. */
751 lower_try_finally_fallthru_label (struct leh_tf_state
*tf
)
753 tree label
= tf
->fallthru_label
;
756 label
= create_artificial_label ();
757 tf
->fallthru_label
= label
;
759 record_in_finally_tree (label
, tf
->outer
->tf
->try_finally_expr
);
764 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
765 returns non-null, then the language requires that the exception path out
766 of a try_finally be treated specially. To wit: the code within the
767 finally block may not itself throw an exception. We have two choices here.
768 First we can duplicate the finally block and wrap it in a must_not_throw
769 region. Second, we can generate code like
774 if (fintmp == eh_edge)
775 protect_cleanup_actions;
778 where "fintmp" is the temporary used in the switch statement generation
779 alternative considered below. For the nonce, we always choose the first
782 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
785 honor_protect_cleanup_actions (struct leh_state
*outer_state
,
786 struct leh_state
*this_state
,
787 struct leh_tf_state
*tf
)
789 tree protect_cleanup_actions
, finally
, x
;
790 tree_stmt_iterator i
;
791 bool finally_may_fallthru
;
793 /* First check for nothing to do. */
794 if (lang_protect_cleanup_actions
)
795 protect_cleanup_actions
= lang_protect_cleanup_actions ();
797 protect_cleanup_actions
= NULL
;
799 finally
= TREE_OPERAND (*tf
->top_p
, 1);
801 /* If the EH case of the finally block can fall through, this may be a
802 structure of the form
815 E.g. with an inline destructor with an embedded try block. In this
816 case we must save the runtime EH data around the nested exception.
818 This complication means that any time the previous runtime data might
819 be used (via fallthru from the finally) we handle the eh case here,
820 whether or not protect_cleanup_actions is active. */
822 finally_may_fallthru
= block_may_fallthru (finally
);
823 if (!finally_may_fallthru
&& !protect_cleanup_actions
)
826 /* Duplicate the FINALLY block. Only need to do this for try-finally,
827 and not for cleanups. */
829 finally
= lower_try_finally_dup_block (finally
, outer_state
);
831 /* Resume execution after the exception. Adding this now lets
832 lower_eh_filter not add unnecessary gotos, as it is clear that
833 we never fallthru from this copy of the finally block. */
834 if (finally_may_fallthru
)
836 tree save_eptr
, save_filt
;
838 save_eptr
= create_tmp_var (ptr_type_node
, "save_eptr");
839 save_filt
= create_tmp_var (integer_type_node
, "save_filt");
841 i
= tsi_start (finally
);
842 x
= build (EXC_PTR_EXPR
, ptr_type_node
);
843 x
= build (MODIFY_EXPR
, void_type_node
, save_eptr
, x
);
844 tsi_link_before (&i
, x
, TSI_CONTINUE_LINKING
);
846 x
= build (FILTER_EXPR
, integer_type_node
);
847 x
= build (MODIFY_EXPR
, void_type_node
, save_filt
, x
);
848 tsi_link_before (&i
, x
, TSI_CONTINUE_LINKING
);
850 i
= tsi_last (finally
);
851 x
= build (EXC_PTR_EXPR
, ptr_type_node
);
852 x
= build (MODIFY_EXPR
, void_type_node
, x
, save_eptr
);
853 tsi_link_after (&i
, x
, TSI_CONTINUE_LINKING
);
855 x
= build (FILTER_EXPR
, integer_type_node
);
856 x
= build (MODIFY_EXPR
, void_type_node
, x
, save_filt
);
857 tsi_link_after (&i
, x
, TSI_CONTINUE_LINKING
);
859 x
= build_resx (get_eh_region_number (tf
->region
));
860 tsi_link_after (&i
, x
, TSI_CONTINUE_LINKING
);
863 /* Wrap the block with protect_cleanup_actions as the action. */
864 if (protect_cleanup_actions
)
866 x
= build (EH_FILTER_EXPR
, void_type_node
, NULL
, NULL
);
867 append_to_statement_list (protect_cleanup_actions
, &EH_FILTER_FAILURE (x
));
868 EH_FILTER_MUST_NOT_THROW (x
) = 1;
869 finally
= build (TRY_CATCH_EXPR
, void_type_node
, finally
, x
);
870 lower_eh_filter (outer_state
, &finally
);
873 lower_eh_constructs_1 (outer_state
, &finally
);
875 /* Hook this up to the end of the existing try block. If we
876 previously fell through the end, we'll have to branch around.
877 This means adding a new goto, and adding it to the queue. */
879 i
= tsi_last (TREE_OPERAND (*tf
->top_p
, 0));
881 if (tf
->may_fallthru
)
883 x
= lower_try_finally_fallthru_label (tf
);
884 x
= build1 (GOTO_EXPR
, void_type_node
, x
);
885 tsi_link_after (&i
, x
, TSI_CONTINUE_LINKING
);
888 maybe_record_in_goto_queue (this_state
, x
);
890 tf
->may_fallthru
= false;
893 x
= build1 (LABEL_EXPR
, void_type_node
, tf
->eh_label
);
894 tsi_link_after (&i
, x
, TSI_CONTINUE_LINKING
);
895 tsi_link_after (&i
, finally
, TSI_CONTINUE_LINKING
);
897 /* Having now been handled, EH isn't to be considered with
898 the rest of the outgoing edges. */
899 tf
->may_throw
= false;
902 /* A subroutine of lower_try_finally. We have determined that there is
903 no fallthru edge out of the finally block. This means that there is
904 no outgoing edge corresponding to any incoming edge. Restructure the
905 try_finally node for this special case. */
908 lower_try_finally_nofallthru (struct leh_state
*state
, struct leh_tf_state
*tf
)
910 tree x
, finally
, lab
, return_val
;
911 struct goto_queue_node
*q
, *qe
;
916 lab
= create_artificial_label ();
918 finally
= TREE_OPERAND (*tf
->top_p
, 1);
919 *tf
->top_p
= TREE_OPERAND (*tf
->top_p
, 0);
921 x
= build1 (LABEL_EXPR
, void_type_node
, lab
);
922 append_to_statement_list (x
, tf
->top_p
);
926 qe
= q
+ tf
->goto_queue_active
;
929 do_return_redirection (q
, lab
, NULL
, &return_val
);
931 do_goto_redirection (q
, lab
, NULL
);
933 replace_goto_queue (tf
);
935 lower_eh_constructs_1 (state
, &finally
);
936 append_to_statement_list (finally
, tf
->top_p
);
939 /* A subroutine of lower_try_finally. We have determined that there is
940 exactly one destination of the finally block. Restructure the
941 try_finally node for this special case. */
944 lower_try_finally_onedest (struct leh_state
*state
, struct leh_tf_state
*tf
)
946 struct goto_queue_node
*q
, *qe
;
947 tree x
, finally
, finally_label
;
949 finally
= TREE_OPERAND (*tf
->top_p
, 1);
950 *tf
->top_p
= TREE_OPERAND (*tf
->top_p
, 0);
952 lower_eh_constructs_1 (state
, &finally
);
956 /* Only reachable via the exception edge. Add the given label to
957 the head of the FINALLY block. Append a RESX at the end. */
959 x
= build1 (LABEL_EXPR
, void_type_node
, tf
->eh_label
);
960 append_to_statement_list (x
, tf
->top_p
);
962 append_to_statement_list (finally
, tf
->top_p
);
964 x
= build_resx (get_eh_region_number (tf
->region
));
966 append_to_statement_list (x
, tf
->top_p
);
971 if (tf
->may_fallthru
)
973 /* Only reachable via the fallthru edge. Do nothing but let
974 the two blocks run together; we'll fall out the bottom. */
975 append_to_statement_list (finally
, tf
->top_p
);
979 finally_label
= create_artificial_label ();
980 x
= build1 (LABEL_EXPR
, void_type_node
, finally_label
);
981 append_to_statement_list (x
, tf
->top_p
);
983 append_to_statement_list (finally
, tf
->top_p
);
986 qe
= q
+ tf
->goto_queue_active
;
990 /* Reachable by return expressions only. Redirect them. */
991 tree return_val
= NULL
;
993 do_return_redirection (q
, finally_label
, NULL
, &return_val
);
994 replace_goto_queue (tf
);
998 /* Reachable by goto expressions only. Redirect them. */
1000 do_goto_redirection (q
, finally_label
, NULL
);
1001 replace_goto_queue (tf
);
1003 if (VEC_index (tree
, tf
->dest_array
, 0) == tf
->fallthru_label
)
1005 /* Reachable by goto to fallthru label only. Redirect it
1006 to the new label (already created, sadly), and do not
1007 emit the final branch out, or the fallthru label. */
1008 tf
->fallthru_label
= NULL
;
1013 append_to_statement_list (tf
->goto_queue
[0].cont_stmt
, tf
->top_p
);
1014 maybe_record_in_goto_queue (state
, tf
->goto_queue
[0].cont_stmt
);
1017 /* A subroutine of lower_try_finally. There are multiple edges incoming
1018 and outgoing from the finally block. Implement this by duplicating the
1019 finally block for every destination. */
1022 lower_try_finally_copy (struct leh_state
*state
, struct leh_tf_state
*tf
)
1024 tree finally
, new_stmt
;
1027 finally
= TREE_OPERAND (*tf
->top_p
, 1);
1028 *tf
->top_p
= TREE_OPERAND (*tf
->top_p
, 0);
1030 new_stmt
= NULL_TREE
;
1032 if (tf
->may_fallthru
)
1034 x
= lower_try_finally_dup_block (finally
, state
);
1035 lower_eh_constructs_1 (state
, &x
);
1036 append_to_statement_list (x
, &new_stmt
);
1038 x
= lower_try_finally_fallthru_label (tf
);
1039 x
= build1 (GOTO_EXPR
, void_type_node
, x
);
1040 append_to_statement_list (x
, &new_stmt
);
1045 x
= build1 (LABEL_EXPR
, void_type_node
, tf
->eh_label
);
1046 append_to_statement_list (x
, &new_stmt
);
1048 x
= lower_try_finally_dup_block (finally
, state
);
1049 lower_eh_constructs_1 (state
, &x
);
1050 append_to_statement_list (x
, &new_stmt
);
1052 x
= build_resx (get_eh_region_number (tf
->region
));
1053 append_to_statement_list (x
, &new_stmt
);
1058 struct goto_queue_node
*q
, *qe
;
1059 tree return_val
= NULL
;
1060 int return_index
, index
;
1063 struct goto_queue_node
*q
;
1067 return_index
= VEC_length (tree
, tf
->dest_array
);
1068 labels
= xcalloc (sizeof (*labels
), return_index
+ 1);
1071 qe
= q
+ tf
->goto_queue_active
;
1074 index
= q
->index
< 0 ? return_index
: q
->index
;
1076 if (!labels
[index
].q
)
1077 labels
[index
].q
= q
;
1080 for (index
= 0; index
< return_index
+ 1; index
++)
1084 q
= labels
[index
].q
;
1088 lab
= labels
[index
].label
= create_artificial_label ();
1090 if (index
== return_index
)
1091 do_return_redirection (q
, lab
, NULL
, &return_val
);
1093 do_goto_redirection (q
, lab
, NULL
);
1095 x
= build1 (LABEL_EXPR
, void_type_node
, lab
);
1096 append_to_statement_list (x
, &new_stmt
);
1098 x
= lower_try_finally_dup_block (finally
, state
);
1099 lower_eh_constructs_1 (state
, &x
);
1100 append_to_statement_list (x
, &new_stmt
);
1102 append_to_statement_list (q
->cont_stmt
, &new_stmt
);
1103 maybe_record_in_goto_queue (state
, q
->cont_stmt
);
1106 for (q
= tf
->goto_queue
; q
< qe
; q
++)
1110 index
= q
->index
< 0 ? return_index
: q
->index
;
1112 if (labels
[index
].q
== q
)
1115 lab
= labels
[index
].label
;
1117 if (index
== return_index
)
1118 do_return_redirection (q
, lab
, NULL
, &return_val
);
1120 do_goto_redirection (q
, lab
, NULL
);
1123 replace_goto_queue (tf
);
1127 /* Need to link new stmts after running replace_goto_queue due
1128 to not wanting to process the same goto stmts twice. */
1129 append_to_statement_list (new_stmt
, tf
->top_p
);
1132 /* A subroutine of lower_try_finally. There are multiple edges incoming
1133 and outgoing from the finally block. Implement this by instrumenting
1134 each incoming edge and creating a switch statement at the end of the
1135 finally block that branches to the appropriate destination. */
1138 lower_try_finally_switch (struct leh_state
*state
, struct leh_tf_state
*tf
)
1140 struct goto_queue_node
*q
, *qe
;
1141 tree return_val
= NULL
;
1142 tree finally
, finally_tmp
, finally_label
;
1143 int return_index
, eh_index
, fallthru_index
;
1144 int nlabels
, ndests
, j
, last_case_index
;
1145 tree case_label_vec
, switch_stmt
, last_case
, switch_body
;
1148 /* Mash the TRY block to the head of the chain. */
1149 finally
= TREE_OPERAND (*tf
->top_p
, 1);
1150 *tf
->top_p
= TREE_OPERAND (*tf
->top_p
, 0);
1152 /* Lower the finally block itself. */
1153 lower_eh_constructs_1 (state
, &finally
);
1155 /* Prepare for switch statement generation. */
1156 nlabels
= VEC_length (tree
, tf
->dest_array
);
1157 return_index
= nlabels
;
1158 eh_index
= return_index
+ tf
->may_return
;
1159 fallthru_index
= eh_index
+ tf
->may_throw
;
1160 ndests
= fallthru_index
+ tf
->may_fallthru
;
1162 finally_tmp
= create_tmp_var (integer_type_node
, "finally_tmp");
1163 finally_label
= create_artificial_label ();
1165 case_label_vec
= make_tree_vec (ndests
);
1166 switch_stmt
= build (SWITCH_EXPR
, integer_type_node
, finally_tmp
,
1167 NULL_TREE
, case_label_vec
);
1170 last_case_index
= 0;
1172 /* Begin inserting code for getting to the finally block. Things
1173 are done in this order to correspond to the sequence the code is
1176 if (tf
->may_fallthru
)
1178 x
= build (MODIFY_EXPR
, void_type_node
, finally_tmp
,
1179 build_int_cst (NULL_TREE
, fallthru_index
));
1180 append_to_statement_list (x
, tf
->top_p
);
1184 x
= build1 (GOTO_EXPR
, void_type_node
, finally_label
);
1185 append_to_statement_list (x
, tf
->top_p
);
1189 last_case
= build (CASE_LABEL_EXPR
, void_type_node
,
1190 build_int_cst (NULL_TREE
, fallthru_index
), NULL
,
1191 create_artificial_label ());
1192 TREE_VEC_ELT (case_label_vec
, last_case_index
) = last_case
;
1195 x
= build (LABEL_EXPR
, void_type_node
, CASE_LABEL (last_case
));
1196 append_to_statement_list (x
, &switch_body
);
1198 x
= lower_try_finally_fallthru_label (tf
);
1199 x
= build1 (GOTO_EXPR
, void_type_node
, x
);
1200 append_to_statement_list (x
, &switch_body
);
1205 x
= build1 (LABEL_EXPR
, void_type_node
, tf
->eh_label
);
1206 append_to_statement_list (x
, tf
->top_p
);
1208 x
= build (MODIFY_EXPR
, void_type_node
, finally_tmp
,
1209 build_int_cst (NULL_TREE
, eh_index
));
1210 append_to_statement_list (x
, tf
->top_p
);
1212 last_case
= build (CASE_LABEL_EXPR
, void_type_node
,
1213 build_int_cst (NULL_TREE
, eh_index
), NULL
,
1214 create_artificial_label ());
1215 TREE_VEC_ELT (case_label_vec
, last_case_index
) = last_case
;
1218 x
= build (LABEL_EXPR
, void_type_node
, CASE_LABEL (last_case
));
1219 append_to_statement_list (x
, &switch_body
);
1220 x
= build_resx (get_eh_region_number (tf
->region
));
1221 append_to_statement_list (x
, &switch_body
);
1224 x
= build1 (LABEL_EXPR
, void_type_node
, finally_label
);
1225 append_to_statement_list (x
, tf
->top_p
);
1227 append_to_statement_list (finally
, tf
->top_p
);
1229 /* Redirect each incoming goto edge. */
1231 qe
= q
+ tf
->goto_queue_active
;
1232 j
= last_case_index
+ tf
->may_return
;
1236 int switch_id
, case_index
;
1240 mod
= build (MODIFY_EXPR
, void_type_node
, finally_tmp
,
1241 build_int_cst (NULL_TREE
, return_index
));
1242 do_return_redirection (q
, finally_label
, mod
, &return_val
);
1243 switch_id
= return_index
;
1247 mod
= build (MODIFY_EXPR
, void_type_node
, finally_tmp
,
1248 build_int_cst (NULL_TREE
, q
->index
));
1249 do_goto_redirection (q
, finally_label
, mod
);
1250 switch_id
= q
->index
;
1253 case_index
= j
+ q
->index
;
1254 if (!TREE_VEC_ELT (case_label_vec
, case_index
))
1255 TREE_VEC_ELT (case_label_vec
, case_index
)
1256 = build (CASE_LABEL_EXPR
, void_type_node
,
1257 build_int_cst (NULL_TREE
, switch_id
), NULL
,
1258 /* We store the cont_stmt in the
1259 CASE_LABEL, so that we can recover it
1260 in the loop below. We don't create
1261 the new label while walking the
1262 goto_queue because pointers don't
1263 offer a stable order. */
1266 for (j
= last_case_index
; j
< last_case_index
+ nlabels
; j
++)
1271 last_case
= TREE_VEC_ELT (case_label_vec
, j
);
1273 gcc_assert (last_case
);
1275 cont_stmt
= CASE_LABEL (last_case
);
1277 label
= create_artificial_label ();
1278 CASE_LABEL (last_case
) = label
;
1280 x
= build (LABEL_EXPR
, void_type_node
, label
);
1281 append_to_statement_list (x
, &switch_body
);
1282 append_to_statement_list (cont_stmt
, &switch_body
);
1283 maybe_record_in_goto_queue (state
, cont_stmt
);
1285 replace_goto_queue (tf
);
1287 /* Make sure that the last case is the default label, as one is required.
1288 Then sort the labels, which is also required in GIMPLE. */
1289 CASE_LOW (last_case
) = NULL
;
1290 sort_case_labels (case_label_vec
);
1292 /* Need to link switch_stmt after running replace_goto_queue due
1293 to not wanting to process the same goto stmts twice. */
1294 append_to_statement_list (switch_stmt
, tf
->top_p
);
1295 append_to_statement_list (switch_body
, tf
->top_p
);
1298 /* Decide whether or not we are going to duplicate the finally block.
1299 There are several considerations.
1301 First, if this is Java, then the finally block contains code
1302 written by the user. It has line numbers associated with it,
1303 so duplicating the block means it's difficult to set a breakpoint.
1304 Since controlling code generation via -g is verboten, we simply
1305 never duplicate code without optimization.
1307 Second, we'd like to prevent egregious code growth. One way to
1308 do this is to estimate the size of the finally block, multiply
1309 that by the number of copies we'd need to make, and compare against
1310 the estimate of the size of the switch machinery we'd have to add. */
/* NOTE(review): this extract is line-mangled; the original guard lines
   (e.g. the !optimize / optimize_size checks that select among the three
   return statements below) are missing, so only one return can actually
   be reached per path in the full source — verify against the original. */
1313 decide_copy_try_finally (int ndests
, tree finally
)
1315 int f_estimate
, sw_estimate
;
1320 /* Finally estimate N times, plus N gotos. */
1321 f_estimate
= estimate_num_insns (finally
);
1322 f_estimate
= (f_estimate
+ 1) * ndests
;
1324 /* Switch statement (cost 10), N variable assignments, N gotos. */
1325 sw_estimate
= 10 + 2 * ndests
;
1327 /* Optimize for size clearly wants our best guess. */
1329 return f_estimate
< sw_estimate
;
1331 /* ??? These numbers are completely made up so far. */
1333 return f_estimate
< 100 || f_estimate
< sw_estimate
* 2;
1335 return f_estimate
< 40 || f_estimate
* 2 < sw_estimate
* 3;
1338 /* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR nodes
1339 to a sequence of labels and blocks, plus the exception region trees
1340 that record all the magic. This is complicated by the need to
1341 arrange for the FINALLY block to be executed on all exits. */
/* NOTE(review): line-mangled extract; several original lines (opening
   brace, the ndests declaration, the ndests==0 guard before the "dike it
   out" assignment, closing braces) are absent.  Code left byte-identical.
   Flow: lower the try body, count destinations reached by FINALLY
   (fallthru + return + throw + explicit gotos), then dispatch to one of
   the four lowering strategies (nofallthru / onedest / copy / switch). */
1344 lower_try_finally (struct leh_state
*state
, tree
*tp
)
1346 struct leh_tf_state this_tf
;
1347 struct leh_state this_state
;
1350 /* Process the try block. */
1352 memset (&this_tf
, 0, sizeof (this_tf
));
1353 this_tf
.try_finally_expr
= *tp
;
1355 this_tf
.outer
= state
;
1356 if (using_eh_for_cleanups_p
)
1358 = gen_eh_region_cleanup (state
->cur_region
, state
->prev_try
);
1360 this_tf
.region
= NULL
;
1362 this_state
.cur_region
= this_tf
.region
;
1363 this_state
.prev_try
= state
->prev_try
;
1364 this_state
.tf
= &this_tf
;
1366 lower_eh_constructs_1 (&this_state
, &TREE_OPERAND (*tp
, 0));
1368 /* Determine if the try block is escaped through the bottom. */
1369 this_tf
.may_fallthru
= block_may_fallthru (TREE_OPERAND (*tp
, 0));
1371 /* Determine if any exceptions are possible within the try block. */
1372 if (using_eh_for_cleanups_p
)
1373 this_tf
.may_throw
= get_eh_region_may_contain_throw (this_tf
.region
);
1374 if (this_tf
.may_throw
)
1376 this_tf
.eh_label
= create_artificial_label ();
1377 set_eh_region_tree_label (this_tf
.region
, this_tf
.eh_label
);
1378 honor_protect_cleanup_actions (state
, &this_state
, &this_tf
);
1381 /* Sort the goto queue for efficient searching later. */
1382 if (this_tf
.goto_queue_active
> 1)
1383 qsort (this_tf
.goto_queue
, this_tf
.goto_queue_active
,
1384 sizeof (struct goto_queue_node
), goto_queue_cmp
);
1386 /* Determine how many edges (still) reach the finally block. Or rather,
1387 how many destinations are reached by the finally block. Use this to
1388 determine how we process the finally block itself. */
1390 ndests
= VEC_length (tree
, this_tf
.dest_array
);
1391 ndests
+= this_tf
.may_fallthru
;
1392 ndests
+= this_tf
.may_return
;
1393 ndests
+= this_tf
.may_throw
;
1395 /* If the FINALLY block is not reachable, dike it out. */
/* NOTE(review): the guard (presumably "if (ndests == 0)") for the
   assignment below is missing from this extract — confirm upstream. */
1397 *tp
= TREE_OPERAND (*tp
, 0);
1399 /* If the finally block doesn't fall through, then any destination
1400 we might try to impose there isn't reached either. There may be
1401 some minor amount of cleanup and redirection still needed. */
1402 else if (!block_may_fallthru (TREE_OPERAND (*tp
, 1)))
1403 lower_try_finally_nofallthru (state
, &this_tf
);
1405 /* We can easily special-case redirection to a single destination. */
1406 else if (ndests
== 1)
1407 lower_try_finally_onedest (state
, &this_tf
);
1409 else if (decide_copy_try_finally (ndests
, TREE_OPERAND (*tp
, 1)))
1410 lower_try_finally_copy (state
, &this_tf
);
1412 lower_try_finally_switch (state
, &this_tf
);
1414 /* If someone requested we add a label at the end of the transformed
1416 if (this_tf
.fallthru_label
)
1418 tree x
= build1 (LABEL_EXPR
, void_type_node
, this_tf
.fallthru_label
);
1419 append_to_statement_list (x
, tp
);
/* Release per-try-finally scratch state. */
1422 VEC_free (tree
, heap
, this_tf
.dest_array
);
1423 if (this_tf
.goto_queue
)
1424 free (this_tf
.goto_queue
);
1427 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1428 list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
1429 exception region trees that record all the magic. */
/* NOTE(review): line-mangled extract with missing lines (braces, the
   out_label declaration, loop-advance statements).  Code byte-identical.
   For each CATCH_EXPR: create a catch region, lower its body, give it an
   artificial EH label, and append a goto to a shared out_label when the
   handler body can fall through. */
1432 lower_catch (struct leh_state
*state
, tree
*tp
)
1434 struct eh_region
*try_region
;
1435 struct leh_state this_state
;
1436 tree_stmt_iterator i
;
1439 try_region
= gen_eh_region_try (state
->cur_region
);
1440 this_state
.cur_region
= try_region
;
1441 this_state
.prev_try
= try_region
;
1442 this_state
.tf
= state
->tf
;
1444 lower_eh_constructs_1 (&this_state
, &TREE_OPERAND (*tp
, 0));
/* If nothing in the try body can throw, the whole construct reduces
   to the try body itself. */
1446 if (!get_eh_region_may_contain_throw (try_region
))
1448 *tp
= TREE_OPERAND (*tp
, 0);
1453 for (i
= tsi_start (TREE_OPERAND (*tp
, 1)); !tsi_end_p (i
); )
1455 struct eh_region
*catch_region
;
1456 tree
catch, x
, eh_label
;
1458 catch = tsi_stmt (i
);
1459 catch_region
= gen_eh_region_catch (try_region
, CATCH_TYPES (catch));
1461 this_state
.cur_region
= catch_region
;
1462 this_state
.prev_try
= state
->prev_try
;
1463 lower_eh_constructs_1 (&this_state
, &CATCH_BODY (catch));
1465 eh_label
= create_artificial_label ();
1466 set_eh_region_tree_label (catch_region
, eh_label
);
1468 x
= build1 (LABEL_EXPR
, void_type_node
, eh_label
);
1469 tsi_link_before (&i
, x
, TSI_SAME_STMT
);
1471 if (block_may_fallthru (CATCH_BODY (catch)))
1474 out_label
= create_artificial_label ();
1476 x
= build1 (GOTO_EXPR
, void_type_node
, out_label
);
1477 append_to_statement_list (x
, &CATCH_BODY (catch));
1480 tsi_link_before (&i
, CATCH_BODY (catch), TSI_SAME_STMT
);
1484 frob_into_branch_around (tp
, NULL
, out_label
);
1487 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1488 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1489 region trees that record all the magic. */
/* NOTE(review): line-mangled extract; braces, the eh_label declaration
   and an early return are missing.  Code byte-identical.  Creates either
   a must-not-throw or an allowed-exceptions region depending on the
   filter flags, lowers the body, and branches around the failure code. */
1492 lower_eh_filter (struct leh_state
*state
, tree
*tp
)
1494 struct leh_state this_state
;
1495 struct eh_region
*this_region
;
1496 tree inner
= expr_first (TREE_OPERAND (*tp
, 1));
1499 if (EH_FILTER_MUST_NOT_THROW (inner
))
1500 this_region
= gen_eh_region_must_not_throw (state
->cur_region
);
1502 this_region
= gen_eh_region_allowed (state
->cur_region
,
1503 EH_FILTER_TYPES (inner
));
1504 this_state
= *state
;
1505 this_state
.cur_region
= this_region
;
1507 lower_eh_constructs_1 (&this_state
, &TREE_OPERAND (*tp
, 0));
/* No throw possible: the construct reduces to its body. */
1509 if (!get_eh_region_may_contain_throw (this_region
))
1511 *tp
= TREE_OPERAND (*tp
, 0);
1515 lower_eh_constructs_1 (state
, &EH_FILTER_FAILURE (inner
));
1516 TREE_OPERAND (*tp
, 1) = EH_FILTER_FAILURE (inner
);
1518 eh_label
= create_artificial_label ();
1519 set_eh_region_tree_label (this_region
, eh_label
);
1521 frob_into_branch_around (tp
, eh_label
, NULL
);
1524 /* Implement a cleanup expression. This is similar to try-finally,
1525 except that we only execute the cleanup block for exception edges. */
/* NOTE(review): line-mangled extract; braces, early returns and some
   statements are missing.  Code byte-identical.  Builds a fake
   leh_tf_state so honor_protect_cleanup_actions can be reused for a
   cleanup-only region. */
1528 lower_cleanup (struct leh_state
*state
, tree
*tp
)
1530 struct leh_state this_state
;
1531 struct eh_region
*this_region
;
1532 struct leh_tf_state fake_tf
;
1534 /* If not using eh, then exception-only cleanups are no-ops. */
1535 if (!flag_exceptions
)
1537 *tp
= TREE_OPERAND (*tp
, 0);
1538 lower_eh_constructs_1 (state
, tp
);
1542 this_region
= gen_eh_region_cleanup (state
->cur_region
, state
->prev_try
);
1543 this_state
= *state
;
1544 this_state
.cur_region
= this_region
;
1546 lower_eh_constructs_1 (&this_state
, &TREE_OPERAND (*tp
, 0));
/* If the protected body cannot throw, drop the cleanup entirely. */
1548 if (!get_eh_region_may_contain_throw (this_region
))
1550 *tp
= TREE_OPERAND (*tp
, 0);
1554 /* Build enough of a try-finally state so that we can reuse
1555 honor_protect_cleanup_actions. */
1556 memset (&fake_tf
, 0, sizeof (fake_tf
));
1558 fake_tf
.outer
= state
;
1559 fake_tf
.region
= this_region
;
1560 fake_tf
.may_fallthru
= block_may_fallthru (TREE_OPERAND (*tp
, 0));
1561 fake_tf
.may_throw
= true;
1563 fake_tf
.eh_label
= create_artificial_label ();
1564 set_eh_region_tree_label (this_region
, fake_tf
.eh_label
);
1566 honor_protect_cleanup_actions (state
, NULL
, &fake_tf
);
1568 if (fake_tf
.may_throw
)
1570 /* In this case honor_protect_cleanup_actions had nothing to do,
1571 and we should process this normally. */
1572 lower_eh_constructs_1 (state
, &TREE_OPERAND (*tp
, 1));
1573 frob_into_branch_around (tp
, fake_tf
.eh_label
, fake_tf
.fallthru_label
);
1577 /* In this case honor_protect_cleanup_actions did nearly all of
1578 the work. All we have left is to append the fallthru_label. */
1580 *tp
= TREE_OPERAND (*tp
, 0);
1581 if (fake_tf
.fallthru_label
)
1583 tree x
= build1 (LABEL_EXPR
, void_type_node
, fake_tf
.fallthru_label
);
1584 append_to_statement_list (x
, tp
);
1589 /* Main loop for lowering eh constructs. */
/* NOTE(review): line-mangled extract; most case labels of the two switch
   statements, the declaration of T, braces, and break statements are
   missing.  Code byte-identical.  Walks the statement tree rooted at
   *TP, recording throwing statements in the current EH region and
   dispatching TRY_FINALLY_EXPR / TRY_CATCH_EXPR nodes to their
   dedicated lowering routines. */
1592 lower_eh_constructs_1 (struct leh_state
*state
, tree
*tp
)
1594 tree_stmt_iterator i
;
1597 switch (TREE_CODE (t
))
1600 lower_eh_constructs_1 (state
, &COND_EXPR_THEN (t
));
1601 lower_eh_constructs_1 (state
, &COND_EXPR_ELSE (t
));
1605 /* Look for things that can throw exceptions, and record them. */
1606 if (state
->cur_region
&& tree_could_throw_p (t
))
1608 record_stmt_eh_region (state
->cur_region
, t
);
1609 note_eh_region_may_contain_throw (state
->cur_region
);
1614 /* Look for things that can throw exceptions, and record them. */
1615 if (state
->cur_region
&& tree_could_throw_p (t
))
1617 record_stmt_eh_region (state
->cur_region
, t
);
1618 note_eh_region_may_contain_throw (state
->cur_region
);
1624 maybe_record_in_goto_queue (state
, t
);
1627 verify_norecord_switch_expr (state
, t
);
1630 case TRY_FINALLY_EXPR
:
1631 lower_try_finally (state
, tp
);
1634 case TRY_CATCH_EXPR
:
/* The second operand's first statement decides whether this is a
   catch list, an EH filter, or a plain cleanup. */
1635 i
= tsi_start (TREE_OPERAND (t
, 1));
1636 switch (TREE_CODE (tsi_stmt (i
)))
1639 lower_catch (state
, tp
);
1641 case EH_FILTER_EXPR
:
1642 lower_eh_filter (state
, tp
);
1645 lower_cleanup (state
, tp
);
1650 case STATEMENT_LIST
:
1651 for (i
= tsi_start (t
); !tsi_end_p (i
); )
1653 lower_eh_constructs_1 (state
, tsi_stmt_ptr (i
));
1655 if (TREE_CODE (t
) == STATEMENT_LIST
)
1657 tsi_link_before (&i
, t
, TSI_SAME_STMT
);
1666 /* A type, a decl, or some kind of statement that we're not
1667 interested in. Don't walk them. */
/* Entry point of the EH-lowering pass: collect the finally-tree map for
   the current function, lower all EH constructs in its saved tree, and
   hand the resulting regions to the region array.
   NOTE(review): line-mangled extract; return type/return value lines are
   missing.  Code byte-identical. */
1673 lower_eh_constructs (void)
1675 struct leh_state null_state
;
1676 tree
*tp
= &DECL_SAVED_TREE (current_function_decl
);
1678 finally_tree
= htab_create (31, struct_ptr_hash
, struct_ptr_eq
, free
);
1680 collect_finally_tree (*tp
, NULL
);
1682 memset (&null_state
, 0, sizeof (null_state
));
1683 lower_eh_constructs_1 (&null_state
, tp
);
1685 htab_delete (finally_tree
);
1687 collect_eh_region_array ();
/* Pass descriptor for the EH-lowering pass.
   NOTE(review): line-mangled extract; several initializer fields (name,
   gate, sub/next pointers, properties braces) are missing from view.
   Fields left byte-identical. */
1690 struct tree_opt_pass pass_lower_eh
=
1694 lower_eh_constructs
, /* execute */
1697 0, /* static_pass_number */
1698 TV_TREE_EH
, /* tv_id */
1699 PROP_gimple_lcf
, /* properties_required */
1700 PROP_gimple_leh
, /* properties_provided */
1701 PROP_gimple_lcf
, /* properties_destroyed */
1702 0, /* todo_flags_start */
1703 TODO_dump_func
, /* todo_flags_finish */
1708 /* Construct EH edges for STMT. */
/* Callback for foreach_reachable_handler: adds an abnormal EH edge from
   the block containing the statement (passed via DATA) to the block of
   the region's handler label.
   NOTE(review): line-mangled extract; the declarations of STMT and LAB
   are missing.  Code byte-identical. */
1711 make_eh_edge (struct eh_region
*region
, void *data
)
1714 basic_block src
, dst
;
1717 lab
= get_eh_region_tree_label (region
);
1719 src
= bb_for_stmt (stmt
);
1720 dst
= label_to_block (lab
);
1722 make_edge (src
, dst
, EDGE_ABNORMAL
| EDGE_EH
);
/* Create the EH edges required for STMT: resolve its EH region number
   (directly from the operand for RESX_EXPR, otherwise via the statement
   EH table) and add an edge to every reachable handler.
   NOTE(review): line-mangled extract; the region_nr/is_resx declarations
   and the early return for statements with no region are missing.
   Code byte-identical. */
1726 make_eh_edges (tree stmt
)
1731 if (TREE_CODE (stmt
) == RESX_EXPR
)
1733 region_nr
= TREE_INT_CST_LOW (TREE_OPERAND (stmt
, 0));
1738 region_nr
= lookup_stmt_eh_region (stmt
);
1744 foreach_reachable_handler (region_nr
, is_resx
, make_eh_edge
, stmt
);
/* Set by mark_eh_edge when a verification error is reported; read back
   by verify_eh_edges. */
1747 static bool mark_eh_edge_found_error
;
1749 /* Mark edge make_eh_edge would create for given region by setting it aux
1750 field, output error if something goes wrong. */
/* NOTE(review): line-mangled extract; declarations of STMT, LAB and E,
   the aux-marking assignment, and braces are missing.
   Code byte-identical. */
1752 mark_eh_edge (struct eh_region
*region
, void *data
)
1755 basic_block src
, dst
;
1759 lab
= get_eh_region_tree_label (region
);
1761 src
= bb_for_stmt (stmt
);
1762 dst
= label_to_block (lab
);
1764 e
= find_edge (src
, dst
);
1767 error ("EH edge %i->%i is missing", src
->index
, dst
->index
);
1768 mark_eh_edge_found_error
= true;
1770 else if (!(e
->flags
& EDGE_EH
))
1772 error ("EH edge %i->%i miss EH flag", src
->index
, dst
->index
);
1773 mark_eh_edge_found_error
= true;
/* An edge marked twice means two regions map to the same handler. */
1777 /* ??? might not be mistake. */
1778 error ("EH edge %i->%i has duplicated regions", src
->index
, dst
->index
);
1779 mark_eh_edge_found_error
= true;
1785 /* Verify that BB containing stmt as last stmt has precisely the edges
1786 make_eh_edges would create. */
/* NOTE(review): line-mangled extract; the edge_iterator/edge/region_nr
   declarations, several returns, and the aux-clearing loop are missing.
   Code byte-identical.  Returns true when a mismatch between actual and
   expected EH edges was reported. */
1788 verify_eh_edges (tree stmt
)
1792 basic_block bb
= bb_for_stmt (stmt
);
/* Every successor's aux field must start clear; it is used as the
   "expected edge" mark by mark_eh_edge. */
1796 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1797 gcc_assert (!e
->aux
);
1798 mark_eh_edge_found_error
= false;
1799 if (TREE_CODE (stmt
) == RESX_EXPR
)
1801 region_nr
= TREE_INT_CST_LOW (TREE_OPERAND (stmt
, 0));
1806 region_nr
= lookup_stmt_eh_region (stmt
);
/* Statement has no EH region: it must have no EH successors at all. */
1809 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1810 if (e
->flags
& EDGE_EH
)
1812 error ("BB %i can not throw but has EH edges", bb
->index
);
1817 if (!tree_could_throw_p (stmt
))
1819 error ("BB %i last statement has incorrectly set region", bb
->index
);
1825 foreach_reachable_handler (region_nr
, is_resx
, mark_eh_edge
, stmt
);
1826 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1828 if ((e
->flags
& EDGE_EH
) && !e
->aux
)
1830 error ("unnecessary EH edge %i->%i", bb
->index
, e
->dest
->index
);
1831 mark_eh_edge_found_error
= true;
1836 return mark_eh_edge_found_error
;
1840 /* Return true if the expr can trap, as in dereferencing an invalid pointer
1841 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
1842 This routine expects only GIMPLE lhs or rhs input. */
/* NOTE(review): line-mangled extract; many case labels, `restart:` style
   control, returns, and the declaration of T are missing from the big
   switch below.  Code byte-identical. */
1845 tree_could_trap_p (tree expr
)
1847 enum tree_code code
= TREE_CODE (expr
);
1848 bool honor_nans
= false;
1849 bool honor_snans
= false;
1850 bool fp_operation
= false;
1851 bool honor_trapv
= false;
/* Classify the operation once: FP operations may honor NaNs/sNaNs
   depending on -ftrapping-math / -fsignaling-nans; trapping-signed
   integer types set honor_trapv. */
1854 if (TREE_CODE_CLASS (code
) == tcc_comparison
1855 || TREE_CODE_CLASS (code
) == tcc_unary
1856 || TREE_CODE_CLASS (code
) == tcc_binary
)
1858 t
= TREE_TYPE (expr
);
1859 fp_operation
= FLOAT_TYPE_P (t
);
1862 honor_nans
= flag_trapping_math
&& !flag_finite_math_only
;
1863 honor_snans
= flag_signaling_nans
!= 0;
1865 else if (INTEGRAL_TYPE_P (t
) && TYPE_TRAP_SIGNED (t
))
1872 case TARGET_MEM_REF
:
1873 /* For TARGET_MEM_REFs use the information based on the original
1875 expr
= TMR_ORIGINAL (expr
);
1876 code
= TREE_CODE (expr
);
1883 case WITH_SIZE_EXPR
:
1884 expr
= TREE_OPERAND (expr
, 0);
1885 code
= TREE_CODE (expr
);
1888 case ARRAY_RANGE_REF
:
1889 /* Let us be conservative here for now. We might be checking bounds of
1890 the access similarly to the case below. */
1891 if (!TREE_THIS_NOTRAP (expr
))
1894 base
= TREE_OPERAND (expr
, 0);
1895 return tree_could_trap_p (base
);
/* ARRAY_REF (presumably — its case label is missing here): trap if the
   base can trap or the index may be out of bounds. */
1898 base
= TREE_OPERAND (expr
, 0);
1899 if (tree_could_trap_p (base
))
1902 if (TREE_THIS_NOTRAP (expr
))
1905 return !in_array_bounds_p (expr
);
1908 case ALIGN_INDIRECT_REF
:
1909 case MISALIGNED_INDIRECT_REF
:
1910 return !TREE_THIS_NOTRAP (expr
);
1913 return TREE_THIS_VOLATILE (expr
);
1915 case TRUNC_DIV_EXPR
:
1917 case FLOOR_DIV_EXPR
:
1918 case ROUND_DIV_EXPR
:
1919 case EXACT_DIV_EXPR
:
1921 case FLOOR_MOD_EXPR
:
1922 case ROUND_MOD_EXPR
:
1923 case TRUNC_MOD_EXPR
:
/* Division/modulo: traps on sNaN/trapv operands, on FP when trapping
   math is on, or when the divisor is not a nonzero constant. */
1925 if (honor_snans
|| honor_trapv
)
1928 return flag_trapping_math
;
1929 t
= TREE_OPERAND (expr
, 1);
1930 if (!TREE_CONSTANT (t
) || integer_zerop (t
))
1939 /* Some floating point comparisons may trap. */
1944 case UNORDERED_EXPR
:
1954 case FIX_TRUNC_EXPR
:
1956 case FIX_FLOOR_EXPR
:
1957 case FIX_ROUND_EXPR
:
1958 /* Conversion of floating point might trap. */
1964 /* These operations don't trap with floating point. */
1972 /* Any floating arithmetic may trap. */
1973 if (fp_operation
&& flag_trapping_math
)
/* CALL_EXPR (case label missing in this extract): */
1980 t
= get_callee_fndecl (expr
);
1981 /* Assume that calls to weak functions may trap. */
1982 if (!t
|| !DECL_P (t
) || DECL_WEAK (t
))
1987 /* Any floating arithmetic may trap. */
1988 if (fp_operation
&& flag_trapping_math
)
/* Return true if statement T could throw an exception: calls not marked
   ECF_NOTHROW always can; with -fnon-call-exceptions, trapping loads,
   stores and arithmetic can as well.  Always false without -fexceptions.
   NOTE(review): line-mangled extract; the `return false;`/`return true;`
   lines and braces are missing.  Code byte-identical. */
1995 tree_could_throw_p (tree t
)
1997 if (!flag_exceptions
)
1999 if (TREE_CODE (t
) == MODIFY_EXPR
)
/* The store's lhs may trap; otherwise examine the rhs below. */
2001 if (flag_non_call_exceptions
2002 && tree_could_trap_p (TREE_OPERAND (t
, 0)))
2004 t
= TREE_OPERAND (t
, 1);
2007 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
2008 t
= TREE_OPERAND (t
, 0);
2009 if (TREE_CODE (t
) == CALL_EXPR
)
2010 return (call_expr_flags (t
) & ECF_NOTHROW
) == 0;
2011 if (flag_non_call_exceptions
)
2012 return tree_could_trap_p (t
);
/* Return true if STMT can throw to a handler within the current
   function.  Resolves the EH region number (operand 0 for RESX_EXPR,
   otherwise the statement EH table) and defers to can_throw_internal_1.
   NOTE(review): line-mangled extract; the region_nr declaration and the
   no-region early return are missing.  Code byte-identical. */
2017 tree_can_throw_internal (tree stmt
)
2020 bool is_resx
= false;
2022 if (TREE_CODE (stmt
) == RESX_EXPR
)
2023 region_nr
= TREE_INT_CST_LOW (TREE_OPERAND (stmt
, 0)), is_resx
= true;
2025 region_nr
= lookup_stmt_eh_region (stmt
);
2028 return can_throw_internal_1 (region_nr
, is_resx
);
/* Return true if STMT can throw out of the current function.  Mirrors
   tree_can_throw_internal, but a statement with no EH region falls back
   to tree_could_throw_p (the guard for that fallback return is missing
   from this line-mangled extract — confirm against the original).
   Code byte-identical. */
2032 tree_can_throw_external (tree stmt
)
2035 bool is_resx
= false;
2037 if (TREE_CODE (stmt
) == RESX_EXPR
)
2038 region_nr
= TREE_INT_CST_LOW (TREE_OPERAND (stmt
, 0)), is_resx
= true;
2040 region_nr
= lookup_stmt_eh_region (stmt
);
2042 return tree_could_throw_p (stmt
);
2044 return can_throw_external_1 (region_nr
, is_resx
);
2047 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2048 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2049 in the table if it should be in there. Return TRUE if a replacement was
2050 done that my require an EH edge purge. */
2053 maybe_clean_or_replace_eh_stmt (tree old_stmt
, tree new_stmt
)
2055 int region_nr
= lookup_stmt_eh_region (old_stmt
);
2059 bool new_stmt_could_throw
= tree_could_throw_p (new_stmt
);
2061 if (new_stmt
== old_stmt
&& new_stmt_could_throw
)
2064 remove_stmt_from_eh_region (old_stmt
);
2065 if (new_stmt_could_throw
)
2067 add_stmt_to_eh_region (new_stmt
, region_nr
);