/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
extern int using_eh_for_cleanups_p;
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  return *x == *y;
}
static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = a;
  /* Discard the low bits, which are zero for typically aligned pointers.  */
  return (size_t)*x >> 4;
}
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
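/* Usage sketch (illustrative): record_stmt_eh_region and
   add_stmt_to_eh_region enter a statement into the table, and
   lookup_stmt_eh_region maps it back to its EH region number,
   returning -1 when the statement has no entry.  */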
struct throw_stmt_node GTY(())
{
  tree stmt;
  int region_nr;
};

static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;
static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  struct throw_stmt_node *n;
  void **slot;

  if (!region)
    return;

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = get_eh_region_number (region);

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}
void
add_stmt_to_eh_region (tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  if (num < 0)
    abort ();

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = num;

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}
bool
remove_stmt_from_eh_region (tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!throw_stmt_table)
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (throw_stmt_table, &dummy, NO_INSERT);
  if (slot)
    {
      htab_clear_slot (throw_stmt_table, slot);
      return true;
    }
  else
    return false;
}
int
lookup_stmt_eh_region (tree t)
{
  struct throw_stmt_node *p, n;

  if (!throw_stmt_table)
    return -2;

  n.stmt = t;
  p = htab_find (throw_stmt_table, &n);

  return (p ? p->region_nr : -1);
}
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */
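/* For example (an illustrative sketch): given

	try { L1: ... try { L2: ... } finally { ... } } finally { ... }

   the table records L1 and the inner TRY_FINALLY_EXPR as children of
   the outer TRY_FINALLY_EXPR, and L2 as a child of the inner one, so
   that a goto from L2's block to L1 can be seen to leave the inner
   node by walking parent links.  */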
struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = xmalloc (sizeof (*n));
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}
static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = htab_find (finally_tree, &n);
      if (!p)
	return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */
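/* Roughly (an illustrative sketch), a goto that leaves a try/finally

	try { ... goto out; ... } finally { F; }
	out:;

   is redirected to branch to the finally block first, which then
   dispatches to the original destination by one of the strategies
   below (duplication of F, or a switch on a temporary).  */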
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  varray_type dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);
/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}
/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;

  tmp.stmt = stmt;
  ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
		 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */
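/* E.g. (sketch): for "if (a) goto L1; else goto L2;" with L1 in the
   goto queue, a one-statement replacement goto is substituted for the
   arm directly; a longer replacement is linked in after the COND_EXPR
   and the arm is redirected to a fresh label in front of it.  */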
static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
	{
	  tsi_link_before (tsi, t, TSI_SAME_STMT);
	  tsi_delink (tsi);
	  return;
	}
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;

    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;

    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}
/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}
/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}
/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
	tree lab = GOTO_DESTINATION (stmt);

	/* Computed and non-local gotos do not get processed.  Given
	   their nature we can neither tell whether we've escaped the
	   finally block nor redirect them if we knew.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  return;

	/* No need to record gotos that don't leave the try block.  */
	if (! outside_finally_tree (lab, tf->try_finally_expr))
	  return;

	if (! tf->dest_array)
	  {
	    VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
	    VARRAY_PUSH_TREE (tf->dest_array, lab);
	    index = 0;
	  }
	else
	  {
	    int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
	    for (index = 0; index < n; ++index)
	      if (VARRAY_TREE (tf->dest_array, index) == lab)
		break;
	    if (index == n)
	      VARRAY_PUSH_TREE (tf->dest_array, lab);
	  }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      abort ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      if (outside_finally_tree (lab, tf->try_finally_expr))
	abort ();
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function. */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
		       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
	 return value itself is to be computed before the FINALLY block
	 is executed.  e.g.,

		int x;
		int foo (void)
		{
		  x = 0;
		  try {
		    return x;
		  } finally {
		    x++;
		  }
		}

	 should return 0, not 1.  Arrange for this to happen by copying
	 the computed return value into a local temporary.  This also
	 allows us to redirect multiple return statements through the
	 same destination block; whether this is a net win or not really
	 depends, I guess, but it does make generation of the switch in
	 lower_try_finally_switch easier.  */

      if (TREE_CODE (ret_expr) == RESULT_DECL)
	{
	  if (!*return_value_p)
	    *return_value_p = ret_expr;
	  else if (*return_value_p != ret_expr)
	    abort ();
	  q->cont_stmt = q->stmt;
	}
      else if (TREE_CODE (ret_expr) == MODIFY_EXPR)
	{
	  tree result = TREE_OPERAND (ret_expr, 0);
	  tree new, old = TREE_OPERAND (ret_expr, 1);

	  if (!*return_value_p)
	    {
	      if (aggregate_value_p (TREE_TYPE (result),
				     TREE_TYPE (current_function_decl)))
		/* If this function returns in memory, copy the argument
		   into the return slot now.  Otherwise, we might need to
		   worry about magic return semantics, so we need to use a
		   temporary to hold the value until we're actually ready
		   to return.  */
		new = result;
	      else
		new = create_tmp_var (TREE_TYPE (old), "rettmp");
	      *return_value_p = new;
	    }
	  else
	    new = *return_value_p;

	  x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
	  append_to_statement_list (x, &q->repl_stmt);

	  if (new == result)
	    q->cont_stmt = q->stmt;
	  else
	    {
	      x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
	      q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
	    }
	}
      else
	abort ();
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* We want to transform
	try { body; } catch { stuff; }
   to
	body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
	over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = lhd_unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
	record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}
/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
    E.g. with an inline destructor with an embedded try block.  In this
    case we must save the runtime EH data around the nested exception.

    This complication means that any time the previous runtime data might
    be used (via fallthru from the finally) we handle the eh case here,
    whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_2 (get_eh_region_number (tf->region), 0));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
	maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */
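/* E.g. (sketch): if the finally block ends in a call to a function that
   never returns, then neither the fallthru, return, nor goto edges
   survive; all incoming edges can simply be pointed at a single label
   at the head of the lowered finally block.  */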
static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_2 (get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
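/* Sketch of the duplicated form (illustrative): with a fallthru edge
   and one escaping goto, the result contains two copies of the finally
   block F:

	<try body>
	F; goto <fallthru label>;
	<other label>:; F; <original continuation>;

   No dispatch temporary is needed, at the cost of code growth.  */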
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_2 (get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index;
      tree *labels;

      if (tf->dest_array)
	return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
      else
	return_index = 0;
      labels = xcalloc (sizeof (tree), return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  int index = q->index < 0 ? return_index : q->index;
	  tree lab = labels[index];
	  bool build_p = false;

	  if (!lab)
	    {
	      labels[index] = lab = create_artificial_label ();
	      build_p = true;
	    }

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);

	  if (build_p)
	    {
	      x = build1 (LABEL_EXPR, void_type_node, lab);
	      append_to_statement_list (x, &new_stmt);

	      x = lower_try_finally_dup_block (finally, state);
	      lower_eh_constructs_1 (state, &x);
	      append_to_statement_list (x, &new_stmt);

	      append_to_statement_list (q->cont_stmt, &new_stmt);
	      maybe_record_in_goto_queue (state, q->cont_stmt);
	    }
	}
      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
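/* Sketch of the result (illustrative): each incoming edge sets a
   temporary and jumps to the single copy of the finally block, which
   then dispatches:

	finally_tmp = <index for this edge>; goto finally_label;
	...
	finally_label:; F;
	switch (finally_tmp) { case 0: ...; case 1: ...; }

   The actual index assignment for each edge is computed below.  */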
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  if (tf->dest_array)
    nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
  else
    nlabels = 0;
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
		       NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
		 build_int_2 (fallthru_index, 0));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
	{
	  x = build1 (GOTO_EXPR, void_type_node, finally_label);
	  append_to_statement_list (x, tf->top_p);
	}

      last_case = build (CASE_LABEL_EXPR, void_type_node,
			 build_int_2 (fallthru_index, 0), NULL,
			 create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
		 build_int_2 (eh_index, 0));
      append_to_statement_list (x, tf->top_p);

      last_case = build (CASE_LABEL_EXPR, void_type_node,
			 build_int_2 (eh_index, 0), NULL,
			 create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build1 (RESX_EXPR, void_type_node,
		  build_int_2 (get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  last_case_index += nlabels;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
	{
	  mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
		       build_int_2 (return_index, 0));
	  do_return_redirection (q, finally_label, mod, &return_val);
	  switch_id = return_index;
	}
      else
	{
	  mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
		       build_int_2 (q->index, 0));
	  do_goto_redirection (q, finally_label, mod);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
	{
	  last_case = build (CASE_LABEL_EXPR, void_type_node,
			     build_int_2 (switch_id, 0), NULL,
			     create_artificial_label ());
	  TREE_VEC_ELT (case_label_vec, case_index) = last_case;

	  x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
	  append_to_statement_list (x, &switch_body);
	  append_to_statement_list (q->cont_stmt, &switch_body);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}
    }
  replace_goto_queue (tf);
  last_case_index += nlabels;

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
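/* Worked example (illustrative): a finally block estimated at 5 insns
   with 3 destinations gives f_estimate = (5 + 1) * 3 = 18 against
   sw_estimate = 10 + 2 * 3 = 16, so when optimizing for size the
   switch form wins.  */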
static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
/* A subroutine of lower_eh_constructs_1.  Lower TRY_FINALLY_EXPR nodes
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
	   sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  if (this_tf.dest_array)
    ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
  else
    ndests = 0;
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
	{
	  if (!out_label)
	    out_label = create_artificial_label ();

	  x = build1 (GOTO_EXPR, void_type_node, out_label);
	  append_to_statement_list (x, &CATCH_BODY (catch));
	}

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
					 EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
	{
	  tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
	  append_to_statement_list (x, tp);
	}
    }
}
/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);

	  /* ??? For the benefit of calls.c, converting all this to rtl,
	     we need to record the call expression, not just the outer
	     modify statement.  */
	  if (TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR)
	    record_stmt_eh_region (state->cur_region, TREE_OPERAND (t, 1));
	}
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;

    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
	{
	case CATCH_EXPR:
	  lower_catch (state, tp);
	  break;
	case EH_FILTER_EXPR:
	  lower_eh_filter (state, tp);
	  break;
	default:
	  lower_cleanup (state, tp);
	  break;
	}
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
	{
	  lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
	  t = tsi_stmt (i);
	  if (TREE_CODE (t) == STATEMENT_LIST)
	    {
	      tsi_link_before (&i, t, TSI_SAME_STMT);
	      tsi_delink (&i);
	    }
	  else
	    tsi_next (&i);
	}
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
static void
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq,
				      NULL);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
}

struct tree_opt_pass pass_lower_eh =
{
  "eh",				/* name */
  NULL,				/* gate */
  lower_eh_constructs,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_EH,			/* tv_id */
  PROP_gimple_lcf,		/* properties_required */
  PROP_gimple_leh,		/* properties_provided */
  PROP_gimple_lcf,		/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func		/* todo_flags_finish */
};
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
	return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */
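/* E.g. (sketch): "*p" may trap unless TREE_THIS_NOTRAP is set, "x / y"
   may trap when y is not a nonzero constant, and "a < b" may trap for
   floating point operands when NaNs must be honored.  */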
bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  tree t;

  if (TREE_CODE_CLASS (code) == '<'
      || TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2')
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      t = get_base_address (expr);
      return !t || tree_could_trap_p (t);

    case INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (fp_operation && flag_trapping_math)
	return true;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap even with floating point.  */
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      return false;
    }
}
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
	  && tree_could_trap_p (TREE_OPERAND (t, 0)))
	return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
bool
tree_can_throw_internal (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_external_1 (region_nr);
}
bool
maybe_clean_eh_stmt (tree stmt)
{
  if (!tree_could_throw_p (stmt))
    if (remove_stmt_from_eh_region (stmt))
      return true;
  return false;
}

#include "gt-tree-eh.h"