/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "hashtab.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "except.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "target.h"
#include "cfgloop.h"
#include "gimple-low.h"

/* In some instances a tree and a gimple need to be stored in the same table,
   i.e. in hash tables.  This is a structure to do this.  */
typedef union {tree *tp; tree t; gimple g;} treemple;

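/* For example, the goto queue below stores a GIMPLE_GOTO or GIMPLE_RETURN
   through the G member, while for a GIMPLE_COND it stores the address of
   the then/else label operand through TP (see maybe_record_in_goto_queue).  */
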
/* Misc functions used in this file.  */

/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

/* Add statement T in function IFUN to landing pad NUM.  */

static void
add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
{
  gcc_assert (num != 0);

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, hash_map<gimple, int>::create_ggc (31));

  gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num));
}

/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

void
add_stmt_to_eh_lp (gimple t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}

/* Add statement T to the single EH landing pad in REGION.  */

static void
record_stmt_eh_region (eh_region region, gimple t)
{
  if (region == NULL)
    return;
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
        lp = gen_eh_landing_pad (region);
      else
        gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}

/* Remove statement T in function IFUN from its EH landing pad.  */

bool
remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
{
  if (!get_eh_throw_stmt_table (ifun))
    return false;

  if (!get_eh_throw_stmt_table (ifun)->get (t))
    return false;

  get_eh_throw_stmt_table (ifun)->remove (t);
  return true;
}

/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}

/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */

int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
{
  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  int *lp_nr = ifun->eh->throw_stmt_table->get (t);
  return lp_nr ? *lp_nr : 0;
}

/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (cfun == NULL)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}

/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gtry *parent;
};

/* Hashtable helpers.  */

struct finally_tree_hasher : typed_free_remove <finally_tree_node>
{
  typedef finally_tree_node value_type;
  typedef finally_tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

inline hashval_t
finally_tree_hasher::hash (const value_type *v)
{
  return (intptr_t)v->child.t >> 4;
}

inline bool
finally_tree_hasher::equal (const value_type *v, const compare_type *c)
{
  return v->child.t == c->child.t;
}

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static hash_table<finally_tree_hasher> *finally_tree;

static void
record_in_finally_tree (treemple child, gtry *parent)
{
  struct finally_tree_node *n;
  finally_tree_node **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = finally_tree->find_slot (n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (gimple stmt, gtry *region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gtry *region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}

static void
collect_finally_tree (gimple stmt, gtry *region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (as_a <glabel *> (stmt));
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        {
          temp.g = stmt;
          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt),
                                  as_a <gtry *> (stmt));
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        {
          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (
                                as_a <gcatch *> (stmt)),
                              region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
        collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = finally_tree->find (&n);
      if (!p)
        return true;
      start.g = p->parent;
    }
  while (start.g != target);

  return false;
}

/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;

/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */

struct goto_queue_node
{
  treemple stmt;
  location_t location;
  gimple_seq repl_stmt;
  gimple cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  bool is_label;
};

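/* For illustration, given

	try { ... goto out; ... } finally { fin (); }
	out:;

   the escaping goto is queued here: REPL_STMT ends up holding
   "goto <finally_label>;" and CONT_STMT the original "goto out;"
   re-emitted after the finally block (see do_goto_redirection).  */
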
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gtry *try_finally_expr;
  gtry *top_p;

  /* While lowering a top_p usually it is expanded into multiple statements,
     thus we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  hash_map<gimple, goto_queue_node *> *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  vec<tree> dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);

static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
        if ( tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = new hash_map<gimple, goto_queue_node *>;
      for (i = 0; i < tf->goto_queue_active; i++)
        {
          bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g,
                                                  &tf->goto_queue[i]);
          gcc_assert (!existed);
        }
    }

  goto_queue_node **slot = tf->goto_queue_map->get (stmt.g);
  if (slot != NULL)
    return ((*slot)->repl_stmt);

  return NULL;
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
        {
          gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
          gsi_remove (gsi, false);
          return;
        }
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
                                      as_a <gcatch *> (stmt)),
                                    tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
      break;
    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
                                      tf);
        replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
                                      tf);
      }
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}

/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (*seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (&eh_seq, tf);
}

/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple return.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
                      treemple new_stmt,
                      int index,
                      bool is_label,
                      location_t location)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->location = location;
  q->is_label = is_label;
}

/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
                            location_t location)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array.exists ())
    {
      tf->dest_array.create (10);
      tf->dest_array.quick_push (label);
      index = 0;
    }
  else
    {
      int n = tf->dest_array.length ();
      for (index = 0; index < n; ++index)
        if (tf->dest_array[index] == label)
          break;
      if (index == n)
        tf->dest_array.safe_push (label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have an easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true, location);
}

/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        gcond *cond_stmt = as_a <gcond *> (stmt);
        new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
        record_in_goto_queue_label (tf, new_stmt,
                                    gimple_cond_true_label (cond_stmt),
                                    EXPR_LOCATION (*new_stmt.tp));
        new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
        record_in_goto_queue_label (tf, new_stmt,
                                    gimple_cond_false_label (cond_stmt),
                                    EXPR_LOCATION (*new_stmt.tp));
      }
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
                                  gimple_location (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
      break;

    default:
      gcc_unreachable ();
    }
}

#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state,
                             gswitch *switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

	int x;
	int foo (void)
	{
	  x = 0;
	  try {
	    return x;
	  } finally {
	    x++;
	  }
	}

     should return 0, not 1.  We don't have to do anything to make
     this happen because the return value has been placed in the
     RESULT_DECL already.  */

  q->cont_stmt = q->stmt.g;

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)
{
  ggoto *x;

  gcc_assert (q->is_label);

  q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

/* Emit a standard landing pad sequence into SEQ for REGION.  */

static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
  eh_landing_pad lp = region->landing_pads;
  glabel *x;

  if (lp == NULL)
    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
}

/* Emit a RESX statement into SEQ for REGION.  */

static void
emit_resx (gimple_seq *seq, eh_region region)
{
  gresx *x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  if (region->outer)
    record_stmt_eh_region (region->outer, x);
}

/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
  geh_dispatch *x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);
}

/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      if (region->type == ERT_MUST_NOT_THROW)
        break;
      region = region->outer;
      if (region == NULL)
        break;
    }
}

/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

static inline bool
eh_region_may_contain_throw (eh_region r)
{
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
}

/* We want to transform
	try { body; } catch { stuff; }
   to
	normal_sequence:
	  body;
	  over:
	eh_sequence:
	  landing_pad:
	  stuff;
	  goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gtry *tp, eh_region region, tree over)
{
  gimple x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
        over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_set_location (x, loc);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
                             location_t loc)
{
  gtry *region = NULL;
  gimple_seq new_seq;
  gimple_stmt_iterator gsi;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
        {
          tree block = gimple_block (stmt);
          gimple_set_location (stmt, loc);
          gimple_set_block (stmt, block);
        }
    }

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
        {
          temp.t = label;
          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
        }
    }
  return label;
}

/* A subroutine of lower_try_finally.  If FINALLY consists of a
   GIMPLE_EH_ELSE node, return it.  */

static inline geh_else *
get_eh_else (gimple_seq finally)
{
  gimple x = gimple_seq_first_stmt (finally);
  if (gimple_code (x) == GIMPLE_EH_ELSE)
    {
      gcc_assert (gimple_seq_singleton_p (finally));
      return as_a <geh_else *> (x);
    }
  return NULL;
}

/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x;
  geh_mnt *eh_mnt;
  gtry *try_stmt;
  geh_else *eh_else;

  /* First check for nothing to do.  */
  if (lang_hooks.eh_protect_cleanup_actions == NULL)
    return;
  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)
    return;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  If we've got an EH_ELSE, extract it now.  */
  if (eh_else)
    {
      finally = gimple_eh_else_e_body (eh_else);
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
    }
  else if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state,
        gimple_location (tf->try_finally_expr));
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  eh_mnt = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  try_stmt = gimple_build_try (finally, gimple_seq_alloc_with_stmt (eh_mnt),
                               GIMPLE_TRY_CATCH);
  finally = lower_eh_must_not_throw (outer_state, try_stmt);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)
{
  tree lab;
  gimple x;
  geh_else *eh_else;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
        {
          finally = gimple_eh_else_e_body (eh_else);
          lower_eh_constructs_1 (state, &finally);

          emit_post_landing_pad (&eh_seq, tf->region);
          gimple_seq_add_seq (&eh_seq, finally);
        }
    }
  else
    {
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
        {
          emit_post_landing_pad (&eh_seq, tf->region);

          x = gimple_build_goto (lab);
          gimple_set_location (x, gimple_location (tf->try_finally_expr));
          gimple_seq_add_stmt (&eh_seq, x);
        }
    }
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  geh_else *eh_else;
  glabel *label_stmt;
  gimple x;
  gimple_seq finally;
  gimple_stmt_iterator gsi;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      if (tf->may_throw)
        finally = gimple_eh_else_e_body (eh_else);
      else
        finally = gimple_eh_else_n_body (eh_else);
    }

  lower_eh_constructs_1 (state, &finally);

  for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
        {
          tree block = gimple_block (stmt);
          gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
          gimple_set_block (stmt, block);
        }
    }

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  label_stmt = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (tf->dest_array[0] == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple x;
  geh_else *eh_else;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);

  /* Notice EH_ELSE, and simplify some of the remaining code
     by considering FINALLY to be the normal return path only.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    finally = gimple_eh_else_n_body (eh_else);

  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      /* We don't need to copy the EH path of EH_ELSE,
         since it is only emitted once.  */
      if (eh_else)
        seq = gimple_eh_else_e_body (eh_else);
      else
        seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = tf->dest_array.length ();
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label
            = create_artificial_label (tf_loc);

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
          else
            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state, q->location);
          lower_eh_constructs_1 (state, &seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
          else
            do_goto_redirection (q, lab, NULL, tf);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}

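/* The result of the copying strategy has roughly this shape
   (illustrative only):

	<try block>
	<finally copy>  goto fallthru;
	L1: <finally copy>  goto dest1;
	...
	eh_seq: <landing pad>  <finally copy>  resx;  */
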
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  vec<tree> case_label_vec;
  gimple_seq switch_body = NULL;
  gimple x;
  geh_else *eh_else;
  tree tmp;
  gswitch *switch_stmt;
  gimple_seq finally;
  hash_map<tree, gimple> *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Mash the TRY block to the head of the chain.  */
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  x = gimple_seq_last_stmt (finally);
  finally_loc = x ? gimple_location (x) : tf_loc;

  /* Prepare for switch statement generation.  */
  nlabels = tf->dest_array.length ();
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + (tf->may_throw && !eh_else);
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use vec::quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec.create (ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node,
                                              fallthru_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&switch_body, x);
    }

  /* For EH_ELSE, emit the exception path (plus resx) now, then
     subsequently we only need consider the normal path.  */
  if (eh_else)
    {
      if (tf->may_throw)
        {
          finally = gimple_eh_else_e_body (eh_else);
          lower_eh_constructs_1 (state, &finally);

          emit_post_landing_pad (&eh_seq, tf->region);
          gimple_seq_add_seq (&eh_seq, finally);
          emit_resx (&eh_seq, tf->region);
        }

      finally = gimple_eh_else_n_body (eh_else);
    }
  else if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  lower_eh_constructs_1 (state, &finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod = NULL;
      int switch_id;
      unsigned int case_index;

      if (q->index < 0)
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node,
                                                  return_index));
          gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod);
          switch_id = return_index;
        }
      else
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node, q->index));
          gimple_seq_add_stmt (&mod, x);
          do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
        {
          tree case_lab;
          tmp = build_int_cst (integer_type_node, switch_id);
          case_lab = build_case_label (tmp, NULL,
                                       create_artificial_label (tf_loc));
          /* We store the cont_stmt in the pointer map, so that we can recover
             it in the loop below.  */
          if (!cont_map)
            cont_map = new hash_map<tree, gimple>;
          cont_map->put (case_lab, q->cont_stmt);
          case_label_vec.quick_push (case_lab);
        }
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      gimple cont_stmt;

      last_case = case_label_vec[j];

      gcc_assert (last_case);
      gcc_assert (cont_map);

      cont_stmt = *cont_map->get (last_case);

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    delete cont_map;

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  tree tem = case_label_vec.pop ();
  gcc_assert (tem == last_case);
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch (finally_tmp, last_case,
                                     case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}

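/* The result of the switch strategy has roughly this shape
   (illustrative only):

	<try block>  finally_tmp = fallthru_index;
	finally_label:
	  <finally block>
	  switch (finally_tmp)
	    { case fallthru_index: goto fallthru; case 0: goto dest0; ... }

   with each incoming edge assigning its own index to finally_tmp
   before branching to finally_label.  */
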
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
{
  int f_estimate, sw_estimate;
  geh_else *eh_else;

  /* If there's an EH_ELSE involved, the exception path is separate
     and really doesn't come into play for this computation.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      ndests -= may_throw;
      finally = gimple_eh_else_n_body (eh_else);
    }

  if (!optimize)
    {
      gimple_stmt_iterator gsi;

      if (ndests == 1)
        return true;

      for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
            return false;
        }
      return true;
    }

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}

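/* For example (numbers purely illustrative): a finally block counted
   at 9 insns with 3 destinations gives f_estimate = (9 + 1) * 3 = 30
   against sw_estimate = 10 + 2 * 3 = 16, so optimizing for size picks
   the switch, while at -O2 the copies still win (30 < 100).  */
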
/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */

static bool
cleanup_is_dead_in (eh_region reg)
{
  while (reg && reg->type == ERT_CLEANUP)
    reg = reg->outer;
  return (reg && reg->type == ERT_MUST_NOT_THROW);
}

/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static gimple_seq
lower_try_finally (struct leh_state *state, gtry *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
    {
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
    }
  else
    {
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;
    }

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  old_eh_seq = eh_seq;
  eh_seq = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (this_tf.region)
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = this_tf.dest_array.length ();
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, this_tf.may_throw,
                                    gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  this_tf.dest_array.release ();
  free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    delete this_tf.goto_queue_map;

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */
  if (old_eh_seq)
    {
      if (eh_seq == NULL)
        eh_seq = old_eh_seq;
      else
        {
          gimple_seq new_eh_seq = eh_seq;
          eh_seq = old_eh_seq;
          gimple_seq_add_seq (&eh_seq, new_eh_seq);
        }
    }

  return this_tf.top_p_seq;
}

/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static gimple_seq
lower_catch (struct leh_state *state, gtry *tp)
{
  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;
  tree out_label;
  gimple_seq new_seq, cleanup;
  gimple x;
  location_t try_catch_loc = gimple_location (tp);

  if (flag_exceptions)
    {
      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  emit_eh_dispatch (&new_seq, try_region);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = try_region;

  /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
     itself, so that e.g. for coverage purposes the nested cleanups don't
     appear before the cleanup body.  See PR64634 for details.  */
  gimple_seq old_eh_seq = eh_seq;
  eh_seq = NULL;

  out_label = NULL;
  cleanup = gimple_try_cleanup (tp);
  for (gsi = gsi_start (cleanup);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      eh_catch c;
      gcatch *catch_stmt;
      gimple_seq handler;

      catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
      c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));

      handler = gimple_catch_handler (catch_stmt);
      lower_eh_constructs_1 (&this_state, &handler);

      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      if (gimple_seq_may_fallthru (new_seq))
        {
          if (!out_label)
            out_label = create_artificial_label (try_catch_loc);

          x = gimple_build_goto (out_label);
          gimple_seq_add_stmt (&new_seq, x);
        }
      if (!c->type_list)
        break;
    }

  gimple_try_set_cleanup (tp, new_seq);

  gimple_seq new_eh_seq = eh_seq;
  eh_seq = old_eh_seq;
  gimple_seq ret_seq = frob_into_branch_around (tp, try_region, out_label);
  gimple_seq_add_seq (&eh_seq, new_eh_seq);
  return ret_seq;
}

/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static gimple_seq
lower_eh_filter (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  gimple inner, x;
  gimple_seq new_seq;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)
    {
      this_region = gen_eh_region_allowed (state->cur_region,
                                           gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  emit_eh_dispatch (&new_seq, this_region);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
}

/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;

  if (flag_exceptions)
    {
      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
        = gimple_eh_must_not_throw_fndecl (
            as_a <geh_mnt *> (inner));
      this_region->u.must_not_throw.failure_loc
        = LOCATION_LOCUS (gimple_location (tp));

      /* In order to get mangling applied to this decl, we must mark it
         used now.  Otherwise, pass_ipa_free_lang_data won't think it
         needs to happen.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  return gimple_try_eval (tp);
}

/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static gimple_seq
lower_cleanup (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;
  gimple_seq result;
  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);

  if (flag_exceptions && !cleanup_dead)
    {
      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
      result = frob_into_branch_around (tp, this_region,
                                        fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
        {
          gimple x = gimple_build_label (fake_tf.fallthru_label);
          gimple_seq_add_stmt (&result, x);
        }
    }
  return result;
}

/* Main loop for lowering eh constructs.  Also moves gsi to the next
   statement.  */

static void
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
  gimple_seq replace;
  gimple x;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
        tree fndecl = gimple_call_fndecl (stmt);
        tree rhs, lhs;

        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            case BUILT_IN_EH_POINTER:
              /* The front end may have generated a call to
                 __builtin_eh_pointer (0) within a catch region.  Replace
                 this zero argument with the current catch region number.  */
              if (state->ehp_region)
                {
                  tree nr = build_int_cst (integer_type_node,
                                           state->ehp_region->index);
                  gimple_call_set_arg (stmt, 0, nr);
                }
              else
                {
                  /* The user has done something silly.  Remove it.  */
                  rhs = null_pointer_node;
                  goto do_replace;
                }
              break;

            case BUILT_IN_EH_FILTER:
              /* ??? This should never appear, but since it's a builtin it
                 is accessible to abuse by users.  Just remove it and
                 replace the use with the arbitrary value zero.  */
              rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
            do_replace:
              lhs = gimple_call_lhs (stmt);
              x = gimple_build_assign (lhs, rhs);
              gsi_insert_before (gsi, x, GSI_SAME_STMT);
              /* FALLTHRU */

            case BUILT_IN_EH_COPY_VALUES:
              /* Likewise this should not appear.  Remove it.  */
              gsi_remove (gsi, true);
              return;

            default:
              break;
            }
      }
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* If the stmt can throw use a new temporary for the assignment
         to a LHS.  This makes sure the old value of the LHS is
         available on the EH edge.  Only do so for statements that
         potentially fall through (no noreturn calls e.g.), otherwise
         this new assignment might create fake fallthru regions.  */
      if (stmt_could_throw_p (stmt)
          && gimple_has_lhs (stmt)
          && gimple_stmt_may_fallthru (stmt)
          && !tree_could_throw_p (gimple_get_lhs (stmt))
          && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
        {
          tree lhs = gimple_get_lhs (stmt);
          tree tmp = create_tmp_var (TREE_TYPE (lhs));
          gimple s = gimple_build_assign (lhs, tmp);
          gimple_set_location (s, gimple_location (stmt));
          gimple_set_block (s, gimple_block (stmt));
          gimple_set_lhs (stmt, tmp);
          if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (tmp) = 1;
          gsi_insert_after (gsi, s, GSI_SAME_STMT);
        }
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (stmt))
        {
          record_stmt_eh_region (state->cur_region, stmt);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      maybe_record_in_goto_queue (state, stmt);
      break;

    case GIMPLE_SWITCH:
      verify_norecord_switch_expr (state, as_a <gswitch *> (stmt));
      break;

    case GIMPLE_TRY:
      {
        gtry *try_stmt = as_a <gtry *> (stmt);
        if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
          replace = lower_try_finally (state, try_stmt);
        else
          {
            x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt));
            if (!x)
              {
                replace = gimple_try_eval (try_stmt);
                lower_eh_constructs_1 (state, &replace);
              }
            else
              switch (gimple_code (x))
                {
                case GIMPLE_CATCH:
                  replace = lower_catch (state, try_stmt);
                  break;
                case GIMPLE_EH_FILTER:
                  replace = lower_eh_filter (state, try_stmt);
                  break;
                case GIMPLE_EH_MUST_NOT_THROW:
                  replace = lower_eh_must_not_throw (state, try_stmt);
                  break;
                case GIMPLE_EH_ELSE:
                  /* This code is only valid with GIMPLE_TRY_FINALLY.  */
                  gcc_unreachable ();
                default:
                  replace = lower_cleanup (state, try_stmt);
                  break;
                }
          }
      }

      /* Remove the old stmt and insert the transformed sequence
         instead.  */
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next ().  */
      return;

    case GIMPLE_EH_ELSE:
      /* We should be eliminating this in lower_try_finally et al.  */
      gcc_unreachable ();

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }

  gsi_next (gsi);
}

/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */

static void
lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
    lower_eh_constructs_2 (state, &gsi);
}

namespace {

const pass_data pass_data_lower_eh =
{
  GIMPLE_PASS, /* type */
  "eh", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  PROP_gimple_leh, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_eh : public gimple_opt_pass
{
public:
  pass_lower_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_eh, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_lower_eh

unsigned int
pass_lower_eh::execute (function *fun)
{
  struct leh_state null_state;
  gimple_seq bodyp;

  bodyp = gimple_body (current_function_decl);
  if (bodyp == NULL)
    return 0;

  finally_tree = new hash_table<finally_tree_hasher> (31);
  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
  memset (&null_state, 0, sizeof (null_state));

  collect_finally_tree_1 (bodyp, NULL);
  lower_eh_constructs_1 (&null_state, &bodyp);
  gimple_set_body (current_function_decl, bodyp);

  /* We assume there's a return statement, or something, at the end of
     the function, and thus plopping the EH sequence afterward won't
     change anything.  */
  gcc_assert (!gimple_seq_may_fallthru (bodyp));
  gimple_seq_add_seq (&bodyp, eh_seq);

  /* We assume that since BODYP already existed, adding EH_SEQ to it
     didn't change its value, and we don't have to re-set the function.  */
  gcc_assert (bodyp == gimple_body (current_function_decl));

  delete finally_tree;
  finally_tree = NULL;
  BITMAP_FREE (eh_region_may_contain_throw_map);

  /* If this function needs a language specific EH personality routine
     and the frontend didn't already set one do so now.  */
  if (function_needs_eh_personality (fun) == eh_personality_lang
      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
    DECL_FUNCTION_PERSONALITY (current_function_decl)
      = lang_hooks.eh_personality ();

  return 0;
}
} // anon namespace

gimple_opt_pass *
make_pass_lower_eh (gcc::context *ctxt)
{
  return new pass_lower_eh (ctxt);
}
/* Create the multiple edges from an EH_DISPATCH statement to all of
   the possible handlers for its EH region.  Return true if there's
   no fallthru edge; false if there is.  */

bool
make_eh_dispatch_edges (geh_dispatch *stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c; c = c->next_catch)
	{
	  dst = label_to_block (c->label);
	  make_edge (src, dst, 0);

	  /* A catch-all handler doesn't have a fallthru.  */
	  if (c->type_list == NULL)
	    return false;
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (r->u.allowed.label);
      make_edge (src, dst, 0);
      break;

    default:
      gcc_unreachable ();
    }

  return true;
}
/* Create the single EH edge from STMT to its nearest landing pad,
   if there is such a landing pad within the current function.  */

void
make_eh_edges (gimple stmt)
{
  basic_block src, dst;
  eh_landing_pad lp;
  int lp_nr;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr <= 0)
    return;

  lp = get_eh_landing_pad_from_number (lp_nr);
  gcc_assert (lp != NULL);

  src = gimple_bb (stmt);
  dst = label_to_block (lp->post_landing_pad);
  make_edge (src, dst, EDGE_EH);
}
/* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
   do not actually perform the final edge redirection.

   CHANGE_REGION is true when we're being called from cleanup_empty_eh and
   we intend to change the destination EH region as well; this means
   EH_LANDING_PAD_NR must already be set on the destination block label.
   If false, we're being called from generic cfg manipulation code and we
   should preserve our place within the region tree.  */

static void
redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
{
  eh_landing_pad old_lp, new_lp;
  basic_block old_bb;
  gimple throw_stmt;
  int old_lp_nr, new_lp_nr;
  tree old_label, new_label;
  edge_iterator ei;
  edge e;

  old_bb = edge_in->dest;
  old_label = gimple_block_label (old_bb);
  old_lp_nr = EH_LANDING_PAD_NR (old_label);
  gcc_assert (old_lp_nr > 0);
  old_lp = get_eh_landing_pad_from_number (old_lp_nr);

  throw_stmt = last_stmt (edge_in->src);
  gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);

  new_label = gimple_block_label (new_bb);

  /* Look for an existing region that might be using NEW_BB already.  */
  new_lp_nr = EH_LANDING_PAD_NR (new_label);
  if (new_lp_nr)
    {
      new_lp = get_eh_landing_pad_from_number (new_lp_nr);
      gcc_assert (new_lp);

      /* Unless CHANGE_REGION is true, the new and old landing pad
	 had better be associated with the same EH region.  */
      gcc_assert (change_region || new_lp->region == old_lp->region);
    }
  else
    {
      new_lp = NULL;
      gcc_assert (!change_region);
    }

  /* Notice when we redirect the last EH edge away from OLD_BB.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (e != edge_in && (e->flags & EDGE_EH))
      break;

  if (new_lp)
    {
      /* NEW_LP already exists.  If there are still edges into OLD_LP,
	 there's nothing to do with the EH tree.  If there are no more
	 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
	 If CHANGE_REGION is true, then our caller is expecting to remove
	 the landing pad.  */
      if (e == NULL && !change_region)
	remove_eh_landing_pad (old_lp);
    }
  else
    {
      /* No correct landing pad exists.  If there are no more edges
	 into OLD_LP, then we can simply re-use the existing landing pad.
	 Otherwise, we have to create a new landing pad.  */
      if (e == NULL)
	{
	  EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
	  new_lp = old_lp;
	}
      else
	new_lp = gen_eh_landing_pad (old_lp->region);
      new_lp->post_landing_pad = new_label;
      EH_LANDING_PAD_NR (new_label) = new_lp->index;
    }

  /* Maybe move the throwing statement to the new region.  */
  if (old_lp != new_lp)
    {
      remove_stmt_from_eh_lp (throw_stmt);
      add_stmt_to_eh_lp (throw_stmt, new_lp->index);
    }
}
/* Redirect EH edge E to NEW_BB.  */

edge
redirect_eh_edge (edge edge_in, basic_block new_bb)
{
  redirect_eh_edge_1 (edge_in, new_bb, false);
  return ssa_redirect_edge (edge_in, new_bb);
}
/* This is a subroutine of gimple_redirect_edge_and_branch.  Update the
   labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
   The actual edge update will happen in the caller.  */

void
redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
{
  tree new_lab = gimple_block_label (new_bb);
  bool any_changed = false;
  basic_block old_bb;
  eh_region r;
  eh_catch c;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c; c = c->next_catch)
	{
	  old_bb = label_to_block (c->label);
	  if (old_bb == e->dest)
	    {
	      c->label = new_lab;
	      any_changed = true;
	    }
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      old_bb = label_to_block (r->u.allowed.label);
      gcc_assert (old_bb == e->dest);
      r->u.allowed.label = new_lab;
      any_changed = true;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (any_changed);
}
/* Helper function for operation_could_trap_p and stmt_could_throw_p.  */

static bool
operation_could_trap_helper_p (enum tree_code op,
			       bool fp_operation,
			       bool honor_trapv,
			       bool honor_nans,
			       bool honor_snans,
			       tree divisor,
			       bool *handled)
{
  *handled = true;
  switch (op)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
	return true;
      if (fp_operation)
	return flag_trapping_math;
      if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    case COMPLEX_EXPR:
    case CONSTRUCTOR:
      /* Constructing an object cannot trap.  */
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;

      *handled = false;
      return false;
    }
}
/* Return true if operation OP may trap.  FP_OPERATION is true if OP is applied
   on floating-point values.  HONOR_TRAPV is true if OP is applied on integer
   type operands that may trap.  If OP is a division operator, DIVISOR contains
   the value of the divisor.  */

bool
operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
			tree divisor)
{
  bool honor_nans = (fp_operation && flag_trapping_math
		     && !flag_finite_math_only);
  bool honor_snans = fp_operation && flag_signaling_nans != 0;
  bool handled;

  if (TREE_CODE_CLASS (op) != tcc_comparison
      && TREE_CODE_CLASS (op) != tcc_unary
      && TREE_CODE_CLASS (op) != tcc_binary)
    return false;

  return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
					honor_nans, honor_snans, divisor,
					&handled);
}
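/* Editor's sketch of intended usage (hypothetical operands): for integer
   division the answer depends only on the divisor, e.g.

       operation_could_trap_p (TRUNC_DIV_EXPR, false, false, divisor)

   is true when DIVISOR is non-constant or literal zero, and false for a
   nonzero INTEGER_CST such as build_int_cst (integer_type_node, 8).  */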
/* Returns true if it is possible to prove that the index of
   an array access REF (an ARRAY_REF expression) falls into the
   array bounds.  */

static bool
in_array_bounds_p (tree ref)
{
  tree idx = TREE_OPERAND (ref, 1);
  tree min, max;

  if (TREE_CODE (idx) != INTEGER_CST)
    return false;

  min = array_ref_low_bound (ref);
  max = array_ref_up_bound (ref);
  if (!min
      || !max
      || TREE_CODE (min) != INTEGER_CST
      || TREE_CODE (max) != INTEGER_CST)
    return false;

  if (tree_int_cst_lt (idx, min)
      || tree_int_cst_lt (max, idx))
    return false;

  return true;
}
/* Returns true if it is possible to prove that the range of
   an array access REF (an ARRAY_RANGE_REF expression) falls
   into the array bounds.  */

static bool
range_in_array_bounds_p (tree ref)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
  tree range_min, range_max, min, max;

  range_min = TYPE_MIN_VALUE (domain_type);
  range_max = TYPE_MAX_VALUE (domain_type);
  if (!range_min
      || !range_max
      || TREE_CODE (range_min) != INTEGER_CST
      || TREE_CODE (range_max) != INTEGER_CST)
    return false;

  min = array_ref_low_bound (ref);
  max = array_ref_up_bound (ref);
  if (!min
      || !max
      || TREE_CODE (min) != INTEGER_CST
      || TREE_CODE (max) != INTEGER_CST)
    return false;

  if (tree_int_cst_lt (range_min, min)
      || tree_int_cst_lt (max, range_max))
    return false;

  return true;
}
/* Return true if EXPR can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  Cf. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base, div = NULL_TREE;

  if (!expr)
    return false;

  code = TREE_CODE (expr);
  t = TREE_TYPE (expr);

  if (t)
    {
      if (COMPARISON_CLASS_P (expr))
	fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
      else
	fp_operation = FLOAT_TYPE_P (t);
      honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary)
    div = TREE_OPERAND (expr, 1);
  if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
    return true;

 restart:
  switch (code)
    {
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !in_array_bounds_p (expr);

    case TARGET_MEM_REF:
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
	  && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      /* We cannot prove that the access is in-bounds when we have
	 variable-index TARGET_MEM_REFs.  */
      if (code == TARGET_MEM_REF
	  && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
	return true;
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	{
	  tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
	  offset_int off = mem_ref_offset (expr);
	  if (wi::neg_p (off, SIGNED))
	    return true;
	  if (TREE_CODE (base) == STRING_CST)
	    return wi::leu_p (TREE_STRING_LENGTH (base), off);
	  else if (DECL_SIZE_UNIT (base) == NULL_TREE
		   || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST
		   || wi::leu_p (wi::to_offset (DECL_SIZE_UNIT (base)), off))
	    return true;
	  /* Now we are sure the first byte of the access is inside
	     the object.  */
	  return false;
	}
      return true;

    case INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t))
	return true;
      if (DECL_WEAK (t))
	return tree_could_trap_p (t);
      return false;

    case FUNCTION_DECL:
      /* Assume that accesses to weak functions may trap, unless we know
	 they are certainly defined in current TU or in some other
	 LTO partition.  */
      if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
	{
	  cgraph_node *node = cgraph_node::get (expr);
	  if (node)
	    node = node->function_symbol ();
	  return !(node && node->in_other_partition);
	}
      return false;

    case VAR_DECL:
      /* Assume that accesses to weak vars may trap, unless we know
	 they are certainly defined in current TU or in some other
	 LTO partition.  */
      if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
	{
	  varpool_node *node = varpool_node::get (expr);
	  if (node)
	    node = node->ultimate_alias_target ();
	  return !(node && node->in_other_partition);
	}
      return false;

    default:
      return false;
    }
}
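/* Editor's sketch (hypothetical GIMPLE operands): tree_could_trap_p is
   conservative -- a MEM_REF *p_1 without TREE_THIS_NOTRAP and a division
   x_2 / y_3 with a non-constant divisor both report true, while a
   dereference the compiler has marked TREE_THIS_NOTRAP, or an ARRAY_REF
   proved in bounds above, reports false.  */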
/* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be
   an assignment or a conditional) may throw.  */

static bool
stmt_could_throw_1_p (gimple stmt)
{
  enum tree_code code = gimple_expr_code (stmt);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t;
  size_t i;
  bool handled, ret;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      if (is_gimple_assign (stmt)
	  && TREE_CODE_CLASS (code) == tcc_comparison)
	t = TREE_TYPE (gimple_assign_rhs1 (stmt));
      else if (gimple_code (stmt) == GIMPLE_COND)
	t = TREE_TYPE (gimple_cond_lhs (stmt));
      else
	t = gimple_expr_type (stmt);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
	honor_trapv = true;
    }

  /* Check if the main expression may trap.  */
  t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
  ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
				       honor_nans, honor_snans, t,
				       &handled);
  if (handled)
    return ret;

  /* If the expression does not trap, see if any of the individual operands may
     trap.  */
  for (i = 0; i < gimple_num_ops (stmt); i++)
    if (tree_could_trap_p (gimple_op (stmt, i)))
      return true;

  return false;
}
/* Return true if statement STMT could throw an exception.  */

bool
stmt_could_throw_p (gimple stmt)
{
  if (!flag_exceptions)
    return false;

  /* The only statements that can throw an exception are assignments,
     conditionals, calls, resx, and asms.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RESX:
      return true;

    case GIMPLE_CALL:
      return !gimple_call_nothrow_p (as_a <gcall *> (stmt));

    case GIMPLE_ASSIGN:
    case GIMPLE_COND:
      if (!cfun->can_throw_non_call_exceptions)
	return false;
      return stmt_could_throw_1_p (stmt);

    case GIMPLE_ASM:
      if (!cfun->can_throw_non_call_exceptions)
	return false;
      return gimple_asm_volatile_p (as_a <gasm *> (stmt));

    default:
      return false;
    }
}
/* Return true if expression T could throw an exception.  */

bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (cfun->can_throw_non_call_exceptions
	  && tree_could_trap_p (TREE_OPERAND (t, 0)))
	return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (cfun->can_throw_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
/* Return true if STMT can throw an exception that is not caught within
   the current function (CFUN).  */

bool
stmt_can_throw_external (gimple stmt)
{
  int lp_nr;

  if (!stmt_could_throw_p (stmt))
    return false;

  lp_nr = lookup_stmt_eh_lp (stmt);
  return lp_nr == 0;
}
/* Return true if STMT can throw an exception that is caught within
   the current function (CFUN).  */

bool
stmt_can_throw_internal (gimple stmt)
{
  int lp_nr;

  if (!stmt_could_throw_p (stmt))
    return false;

  lp_nr = lookup_stmt_eh_lp (stmt);
  return lp_nr > 0;
}
/* Given a statement STMT in IFUN, if STMT can no longer throw, then
   remove any entry it might have from the EH table.  Return true if
   any change was made.  */

bool
maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
{
  if (stmt_could_throw_p (stmt))
    return false;
  return remove_stmt_from_eh_lp_fn (ifun, stmt);
}
/* Likewise, but always use the current function.  */

bool
maybe_clean_eh_stmt (gimple stmt)
{
  return maybe_clean_eh_stmt_fn (cfun, stmt);
}
/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
{
  int lp_nr = lookup_stmt_eh_lp (old_stmt);

  if (lp_nr != 0)
    {
      bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
	return false;

      remove_stmt_from_eh_lp (old_stmt);
      if (new_stmt_could_throw)
	{
	  add_stmt_to_eh_lp (new_stmt, lp_nr);
	  return false;
	}
      else
	return true;
    }

  return false;
}
/* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
   in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT.  The MAP
   operand is the return value of duplicate_eh_regions.  */

bool
maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
			    struct function *old_fun, gimple old_stmt,
			    hash_map<void *, void *> *map,
			    int default_lp_nr)
{
  int old_lp_nr, new_lp_nr;

  if (!stmt_could_throw_p (new_stmt))
    return false;

  old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
  if (old_lp_nr == 0)
    {
      if (default_lp_nr == 0)
	return false;
      new_lp_nr = default_lp_nr;
    }
  else if (old_lp_nr > 0)
    {
      eh_landing_pad old_lp, new_lp;

      old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
      new_lp = static_cast<eh_landing_pad> (*map->get (old_lp));
      new_lp_nr = new_lp->index;
    }
  else
    {
      eh_region old_r, new_r;

      old_r = (*old_fun->eh->region_array)[-old_lp_nr];
      new_r = static_cast<eh_region> (*map->get (old_r));
      new_lp_nr = -new_r->index;
    }

  add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
  return true;
}
/* Similar, but both OLD_STMT and NEW_STMT are within the current function,
   and thus no remapping is required.  */

bool
maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
{
  int lp_nr;

  if (!stmt_could_throw_p (new_stmt))
    return false;

  lp_nr = lookup_stmt_eh_lp (old_stmt);
  if (lp_nr == 0)
    return false;

  add_stmt_to_eh_lp (new_stmt, lp_nr);
  return true;
}
/* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
   GIMPLE_TRY) that are similar enough to be considered the same.  Currently
   this only handles handlers consisting of a single call, as that's the
   important case for C++: a destructor call for a particular object showing
   up in multiple handlers.  */

static bool
same_handler_p (gimple_seq oneh, gimple_seq twoh)
{
  gimple_stmt_iterator gsi;
  gimple ones, twos;
  unsigned int ai;

  gsi = gsi_start (oneh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  ones = gsi_stmt (gsi);

  gsi = gsi_start (twoh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  twos = gsi_stmt (gsi);

  if (!is_gimple_call (ones)
      || !is_gimple_call (twos)
      || gimple_call_lhs (ones)
      || gimple_call_lhs (twos)
      || gimple_call_chain (ones)
      || gimple_call_chain (twos)
      || !gimple_call_same_target_p (ones, twos)
      || gimple_call_num_args (ones) != gimple_call_num_args (twos))
    return false;

  for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
    if (!operand_equal_p (gimple_call_arg (ones, ai),
			  gimple_call_arg (twos, ai), 0))
      return false;

  return true;
}
/* Optimize
    try { A() } finally { try { ~B() } catch { ~A() } }
    try { ... } finally { ~A() }
   into
    try { A() } catch { ~B() }
    try { ~B() ... } finally { ~A() }

   This occurs frequently in C++, where A is a local variable and B is a
   temporary used in the initializer for A.  */

static void
optimize_double_finally (gtry *one, gtry *two)
{
  gimple oneh;
  gimple_stmt_iterator gsi;
  gimple_seq cleanup;

  cleanup = gimple_try_cleanup (one);
  gsi = gsi_start (cleanup);
  if (!gsi_one_before_end_p (gsi))
    return;

  oneh = gsi_stmt (gsi);
  if (gimple_code (oneh) != GIMPLE_TRY
      || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
    return;

  if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
    {
      gimple_seq seq = gimple_try_eval (oneh);

      gimple_try_set_cleanup (one, seq);
      gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
      seq = copy_gimple_seq_and_replace_locals (seq);
      gimple_seq_add_seq (&seq, gimple_try_eval (two));
      gimple_try_set_eval (two, seq);
    }
}
/* Perform EH refactoring optimizations that are simpler to do when code
   flow has been lowered but EH structures haven't.  */

static void
refactor_eh_r (gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  gimple one, two;

  one = NULL;
  two = NULL;
  gsi = gsi_start (seq);
  while (1)
    {
      one = two;
      if (gsi_end_p (gsi))
	two = NULL;
      else
	two = gsi_stmt (gsi);
      if (one && two)
	if (gtry *try_one = dyn_cast <gtry *> (one))
	  if (gtry *try_two = dyn_cast <gtry *> (two))
	    if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
		&& gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
	      optimize_double_finally (try_one, try_two);
      if (one)
	switch (gimple_code (one))
	  {
	  case GIMPLE_TRY:
	    refactor_eh_r (gimple_try_eval (one));
	    refactor_eh_r (gimple_try_cleanup (one));
	    break;
	  case GIMPLE_CATCH:
	    refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
	    break;
	  case GIMPLE_EH_FILTER:
	    refactor_eh_r (gimple_eh_filter_failure (one));
	    break;
	  case GIMPLE_EH_ELSE:
	    {
	      geh_else *eh_else_stmt = as_a <geh_else *> (one);
	      refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
	      refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
	    }
	    break;
	  default:
	    break;
	  }
      if (two)
	gsi_next (&gsi);
      else
	break;
    }
}
namespace {

const pass_data pass_data_refactor_eh =
{
  GIMPLE_PASS, /* type */
  "ehopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_refactor_eh : public gimple_opt_pass
{
public:
  pass_refactor_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_refactor_eh, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_exceptions != 0; }
  virtual unsigned int execute (function *)
    {
      refactor_eh_r (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_refactor_eh

} // anon namespace

gimple_opt_pass *
make_pass_refactor_eh (gcc::context *ctxt)
{
  return new pass_refactor_eh (ctxt);
}
/* At the end of gimple optimization, we can lower RESX.  */

static bool
lower_resx (basic_block bb, gresx *stmt,
	    hash_map<eh_region, tree> *mnt_map)
{
  int lp_nr;
  eh_region src_r, dst_r;
  gimple_stmt_iterator gsi;
  gimple x;
  tree fn, src_nr;
  bool ret = false;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr != 0)
    dst_r = get_eh_region_from_lp_number (lp_nr);
  else
    dst_r = NULL;

  src_r = get_eh_region_from_number (gimple_resx_region (stmt));
  gsi = gsi_last_bb (bb);

  if (src_r == NULL)
    {
      /* We can wind up with no source region when pass_cleanup_eh shows
	 that there are no entries into an eh region and deletes it, but
	 then the block that contains the resx isn't removed.  This can
	 happen without optimization when the switch statement created by
	 lower_try_finally_switch isn't simplified to remove the eh case.

	 Resolve this by expanding the resx node to an abort.  */

      fn = builtin_decl_implicit (BUILT_IN_TRAP);
      x = gimple_build_call (fn, 0);
      gsi_insert_before (&gsi, x, GSI_SAME_STMT);

      while (EDGE_COUNT (bb->succs) > 0)
	remove_edge (EDGE_SUCC (bb, 0));
    }
  else if (dst_r)
    {
      /* When we have a destination region, we resolve this by copying
	 the excptr and filter values into place, and changing the edge
	 to immediately after the landing pad.  */
      edge e;

      if (lp_nr < 0)
	{
	  basic_block new_bb;
	  tree lab;

	  /* We are resuming into a MUST_NOT_THROW region.  Expand a call to
	     the failure decl into a new block, if needed.  */
	  gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);

	  tree *slot = mnt_map->get (dst_r);
	  if (slot == NULL)
	    {
	      gimple_stmt_iterator gsi2;

	      new_bb = create_empty_bb (bb);
	      add_bb_to_loop (new_bb, bb->loop_father);
	      lab = gimple_block_label (new_bb);
	      gsi2 = gsi_start_bb (new_bb);

	      fn = dst_r->u.must_not_throw.failure_decl;
	      x = gimple_build_call (fn, 0);
	      gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
	      gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);

	      mnt_map->put (dst_r, lab);
	    }
	  else
	    {
	      lab = *slot;
	      new_bb = label_to_block (lab);
	    }

	  gcc_assert (EDGE_COUNT (bb->succs) == 0);
	  e = make_edge (bb, new_bb, EDGE_FALLTHRU);
	  e->count = bb->count;
	  e->probability = REG_BR_PROB_BASE;
	}
      else
	{
	  edge_iterator ei;
	  tree dst_nr = build_int_cst (integer_type_node, dst_r->index);

	  fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
	  src_nr = build_int_cst (integer_type_node, src_r->index);
	  x = gimple_build_call (fn, 2, dst_nr, src_nr);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	  /* Update the flags for the outgoing edge.  */
	  e = single_succ_edge (bb);
	  gcc_assert (e->flags & EDGE_EH);
	  e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;

	  /* If there are no more EH users of the landing pad, delete it.  */
	  FOR_EACH_EDGE (e, ei, e->dest->preds)
	    if (e->flags & EDGE_EH)
	      break;
	  if (e == NULL)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      remove_eh_landing_pad (lp);
	    }
	}

      ret = true;
    }
  else
    {
      tree var;

      /* When we don't have a destination region, this exception escapes
	 up the call chain.  We resolve this by generating a call to the
	 _Unwind_Resume library function.  */

      /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
	 with no arguments for C++ and Java.  Check for that.  */
      if (src_r->use_cxa_end_cleanup)
	{
	  fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
	  x = gimple_build_call (fn, 0);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);
	}
      else
	{
	  fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
	  src_nr = build_int_cst (integer_type_node, src_r->index);
	  x = gimple_build_call (fn, 1, src_nr);
	  var = create_tmp_var (ptr_type_node);
	  var = make_ssa_name (var, x);
	  gimple_call_set_lhs (x, var);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	  fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
	  x = gimple_build_call (fn, 1, var);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);
	}

      gcc_assert (EDGE_COUNT (bb->succs) == 0);
    }

  gsi_remove (&gsi, true);

  return ret;
}
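/* Editor's sketch of the escaping case above (SSA names hypothetical):
   a "resx N" whose exception leaves the function is rewritten to

       ptr_1 = __builtin_eh_pointer (N);
       _Unwind_Resume (ptr_1);

   while on ARM EABI targets (use_cxa_end_cleanup) it becomes a bare
   __cxa_end_cleanup () call.  */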
namespace {

const pass_data pass_data_lower_resx =
{
  GIMPLE_PASS, /* type */
  "resx", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_resx : public gimple_opt_pass
{
public:
  pass_lower_resx (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_resx, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_exceptions != 0; }
  virtual unsigned int execute (function *);

}; // class pass_lower_resx

unsigned int
pass_lower_resx::execute (function *fun)
{
  basic_block bb;
  bool dominance_invalidated = false;
  bool any_rewritten = false;

  hash_map<eh_region, tree> mnt_map;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple last = last_stmt (bb);
      if (last && is_gimple_resx (last))
	{
	  dominance_invalidated |=
	    lower_resx (bb, as_a <gresx *> (last), &mnt_map);
	  any_rewritten = true;
	}
    }

  if (dominance_invalidated)
    {
      free_dominance_info (CDI_DOMINATORS);
      free_dominance_info (CDI_POST_DOMINATORS);
    }

  return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_resx (gcc::context *ctxt)
{
  return new pass_lower_resx (ctxt);
}
/* Try to optimize var = {v} {CLOBBER} stmts followed just by
   external throw.  */

static void
optimize_clobbers (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  bool any_clobbers = false;
  bool seen_stack_restore = false;
  edge_iterator ei;
  edge e;

  /* Only optimize anything if the bb contains at least one clobber,
     ends with resx (checked by caller), optionally contains some
     debug stmts or labels, or at most one __builtin_stack_restore
     call, and has an incoming EH edge.  */
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;
      if (gimple_clobber_p (stmt))
	{
	  any_clobbers = true;
	  continue;
	}
      if (!seen_stack_restore
	  && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
	{
	  seen_stack_restore = true;
	  continue;
	}
      if (gimple_code (stmt) == GIMPLE_LABEL)
	break;
      return;
    }
  if (!any_clobbers)
    return;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_EH)
      break;
  if (e == NULL)
    return;
  gsi = gsi_last_bb (bb);
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (!gimple_clobber_p (stmt))
	continue;
      unlink_stmt_vdef (stmt);
      gsi_remove (&gsi, true);
      release_defs (stmt);
    }
}
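/* Editor's sketch: given a landing-pad block of the shape

       <L1>:
       x ={v} {CLOBBER};
       resx N;			<-- throws externally

   the clobber is useless on a path that leaves the function, so
   optimize_clobbers deletes it.  */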
/* Try to sink var = {v} {CLOBBER} stmts followed just by
   internal throw to successor BB.  */

static int
sink_clobbers (basic_block bb)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi, dgsi;
  basic_block succbb;
  bool any_clobbers = false;
  unsigned todo = 0;

  /* Only optimize if BB has a single EH successor and
     all predecessor edges are EH too.  */
  if (!single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
    return 0;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if ((e->flags & EDGE_EH) == 0)
	return 0;
    }

  /* And BB contains only CLOBBER stmts before the final
     RESX.  */
  gsi = gsi_last_bb (bb);
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;
      if (gimple_code (stmt) == GIMPLE_LABEL)
	break;
      if (!gimple_clobber_p (stmt))
	return 0;
      any_clobbers = true;
    }
  if (!any_clobbers)
    return 0;

  edge succe = single_succ_edge (bb);
  succbb = succe->dest;

  /* See if there is a virtual PHI node to take an updated virtual
     operand from.  */
  gphi *vphi = NULL;
  tree vuse = NULL_TREE;
  for (gphi_iterator gpi = gsi_start_phis (succbb);
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      tree res = gimple_phi_result (gpi.phi ());
      if (virtual_operand_p (res))
	{
	  vphi = gpi.phi ();
	  vuse = res;
	  break;
	}
    }

  dgsi = gsi_after_labels (succbb);
  gsi = gsi_last_bb (bb);
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      tree lhs;
      if (is_gimple_debug (stmt))
	continue;
      if (gimple_code (stmt) == GIMPLE_LABEL)
	break;
      lhs = gimple_assign_lhs (stmt);
      /* Unfortunately we don't have dominance info updated at this
	 point, so checking if
	 dominated_by_p (CDI_DOMINATORS, succbb,
			 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0)))
	 would be too costly.  Thus, avoid sinking any clobbers that
	 refer to non-(D) SSA_NAMEs.  */
      if (TREE_CODE (lhs) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0)))
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	  continue;
	}

      /* As we do not change stmt order when sinking across a
	 forwarder edge we can keep virtual operands in place.  */
      gsi_remove (&gsi, false);
      gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);

      /* But adjust virtual operands if we sunk across a PHI node.  */
      if (vuse)
	{
	  gimple use_stmt;
	  imm_use_iterator iter;
	  use_operand_p use_p;
	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	      SET_USE (use_p, gimple_vdef (stmt));
	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
	    {
	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
	    }
	  /* Adjust the incoming virtual operand.  */
	  SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
	  SET_USE (gimple_vuse_op (stmt), vuse);
	}
      /* If there isn't a single predecessor but no virtual PHI node
	 arrange for virtual operands to be renamed.  */
      else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
	       && !single_pred_p (succbb))
	{
	  /* In this case there will be no use of the VDEF of this stmt.
	     ??? Unless this is a secondary opportunity and we have not
	     removed unreachable blocks yet, so we cannot assert this.
	     Which also means we will end up renaming too many times.  */
	  SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
	  mark_virtual_operands_for_renaming (cfun);
	  todo |= TODO_update_ssa_only_virtuals;
	}
    }

  return todo;
}
/* At the end of inlining, we can lower EH_DISPATCH.  Return true when
   we have found some duplicate labels and removed some edges.  */

static bool
lower_eh_dispatch (basic_block src, geh_dispatch *stmt)
{
  gimple_stmt_iterator gsi;
  int region_nr;
  eh_region r;
  tree filter, fn;
  gimple x;
  bool redirected = false;

  region_nr = gimple_eh_dispatch_region (stmt);
  r = get_eh_region_from_number (region_nr);

  gsi = gsi_last_bb (src);

  switch (r->type)
    {
    case ERT_TRY:
      {
	auto_vec<tree> labels;
	tree default_label = NULL;
	eh_catch c;
	edge_iterator ei;
	edge e;
	hash_set<tree> seen_values;

	/* Collect the labels for a switch.  Zero the post_landing_pad
	   field because we'll no longer have anything keeping these labels
	   in existence and the optimizer will be free to merge these
	   blocks at will.  */
	for (c = r->u.eh_try.first_catch; c; c = c->next_catch)
	  {
	    tree tp_node, flt_node, lab = c->label;
	    bool have_label = false;

	    c->label = NULL;
	    tp_node = c->type_list;
	    flt_node = c->filter_list;

	    if (tp_node == NULL)
	      {
		default_label = lab;
		break;
	      }
	    do
	      {
		/* Filter out duplicate labels that arise when this handler
		   is shadowed by an earlier one.  When no labels are
		   attached to the handler anymore, we remove
		   the corresponding edge and then we delete unreachable
		   blocks at the end of this pass.  */
		if (! seen_values.contains (TREE_VALUE (flt_node)))
		  {
		    tree t = build_case_label (TREE_VALUE (flt_node),
					       NULL, lab);
		    labels.safe_push (t);
		    seen_values.add (TREE_VALUE (flt_node));
		    have_label = true;
		  }

		tp_node = TREE_CHAIN (tp_node);
		flt_node = TREE_CHAIN (flt_node);
	      }
	    while (tp_node);
	    if (! have_label)
	      {
		remove_edge (find_edge (src, label_to_block (lab)));
		redirected = true;
	      }
	  }

	/* Clean up the edge flags.  */
	FOR_EACH_EDGE (e, ei, src->succs)
	  {
	    if (e->flags & EDGE_FALLTHRU)
	      {
		/* If there was no catch-all, use the fallthru edge.  */
		if (default_label == NULL)
		  default_label = gimple_block_label (e->dest);
		e->flags &= ~EDGE_FALLTHRU;
	      }
	  }
	gcc_assert (default_label != NULL);

	/* Don't generate a switch if there's only a default case.
	   This is common in the form of try { A; } catch (...) { B; }.  */
	if (!labels.exists ())
	  {
	    e = single_succ_edge (src);
	    e->flags |= EDGE_FALLTHRU;
	  }
	else
	  {
	    fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
	    x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
							 region_nr));
	    filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
	    filter = make_ssa_name (filter, x);
	    gimple_call_set_lhs (x, filter);
	    gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	    /* Turn the default label into a default case.  */
	    default_label = build_case_label (NULL, NULL, default_label);
	    sort_case_labels (labels);

	    x = gimple_build_switch (filter, default_label, labels);
	    gsi_insert_before (&gsi, x, GSI_SAME_STMT);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      {
	edge b_e = BRANCH_EDGE (src);
	edge f_e = FALLTHRU_EDGE (src);

	fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
	x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
						     region_nr));
	filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
	filter = make_ssa_name (filter, x);
	gimple_call_set_lhs (x, filter);
	gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	r->u.allowed.label = NULL;
	x = gimple_build_cond (EQ_EXPR, filter,
			       build_int_cst (TREE_TYPE (filter),
					      r->u.allowed.filter),
			       NULL_TREE, NULL_TREE);
	gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
	f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Replace the EH_DISPATCH with the SWITCH or COND generated above.  */
  gsi_remove (&gsi, true);
  return redirected;
}
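/* Editor's sketch of the ERT_TRY lowering above (labels hypothetical):
   an "eh_dispatch N" for try { ... } catch (A) ... catch (B) ... becomes

       filter_1 = __builtin_eh_filter (N);
       switch (filter_1) <default: <D1>, case 1: <LA>, case 2: <LB>>

   while an ERT_ALLOWED_EXCEPTIONS region instead becomes a two-way
   condition comparing filter_1 against the region's allowed filter.  */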
namespace {

const pass_data pass_data_lower_eh_dispatch =
{
  GIMPLE_PASS, /* type */
  "ehdisp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_eh_dispatch : public gimple_opt_pass
{
public:
  pass_lower_eh_dispatch (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; }
  virtual unsigned int execute (function *);

}; // class pass_lower_eh_dispatch

unsigned int
pass_lower_eh_dispatch::execute (function *fun)
{
  basic_block bb;
  int flags = 0;
  bool redirected = false;

  assign_filter_values ();

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple last = last_stmt (bb);
      if (last == NULL)
	continue;
      if (gimple_code (last) == GIMPLE_EH_DISPATCH)
	{
	  redirected |= lower_eh_dispatch (bb,
					   as_a <geh_dispatch *> (last));
	  flags |= TODO_update_ssa_only_virtuals;
	}
      else if (gimple_code (last) == GIMPLE_RESX)
	{
	  if (stmt_can_throw_external (last))
	    optimize_clobbers (bb);
	  else
	    flags |= sink_clobbers (bb);
	}
    }

  if (redirected)
    delete_unreachable_blocks ();
  return flags;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_eh_dispatch (gcc::context *ctxt)
{
  return new pass_lower_eh_dispatch (ctxt);
}
/* Walk statements, see what regions and, optionally, landing pads
   are really referenced.

   Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
   and in LP_REACHABLEP an sbitmap with bits set for reachable landing pads.

   Passing NULL for LP_REACHABLEP is valid, in this case only reachable
   regions are marked.

   The caller is responsible for freeing the returned sbitmaps.  */

static void
mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
{
  sbitmap r_reachable, lp_reachable;
  basic_block bb;
  bool mark_landing_pads = (lp_reachablep != NULL);
  gcc_checking_assert (r_reachablep != NULL);

  r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
  bitmap_clear (r_reachable);
  *r_reachablep = r_reachable;

  if (mark_landing_pads)
    {
      lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
      bitmap_clear (lp_reachable);
      *lp_reachablep = lp_reachable;
    }
  else
    lp_reachable = NULL;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (mark_landing_pads)
	    {
	      int lp_nr = lookup_stmt_eh_lp (stmt);

	      /* Negative LP numbers are MUST_NOT_THROW regions which
		 are not considered BB enders.  */
	      if (lp_nr < 0)
		bitmap_set_bit (r_reachable, -lp_nr);

	      /* Positive LP numbers are real landing pads, and BB enders.  */
	      else if (lp_nr > 0)
		{
		  gcc_assert (gsi_one_before_end_p (gsi));
		  eh_region region = get_eh_region_from_lp_number (lp_nr);
		  bitmap_set_bit (r_reachable, region->index);
		  bitmap_set_bit (lp_reachable, lp_nr);
		}
	    }

	  /* Avoid removing regions referenced from RESX/EH_DISPATCH.  */
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RESX:
	      bitmap_set_bit (r_reachable,
			      gimple_resx_region (as_a <gresx *> (stmt)));
	      break;
	    case GIMPLE_EH_DISPATCH:
	      bitmap_set_bit (r_reachable,
			      gimple_eh_dispatch_region (
				as_a <geh_dispatch *> (stmt)));
	      break;
	    case GIMPLE_CALL:
	      if (gimple_call_builtin_p (stmt, BUILT_IN_EH_COPY_VALUES))
		for (int i = 0; i < 2; ++i)
		  {
		    tree rt = gimple_call_arg (stmt, i);
		    HOST_WIDE_INT ri = tree_to_shwi (rt);

		    gcc_assert (ri == (int) ri);
		    bitmap_set_bit (r_reachable, ri);
		  }
	      break;
	    default:
	      break;
	    }
	}
    }
}
/* Remove unreachable handlers and unreachable landing pads.  */

static void
remove_unreachable_handlers (void)
{
  sbitmap r_reachable, lp_reachable;
  eh_region region;
  eh_landing_pad lp;
  unsigned i;

  mark_reachable_handlers (&r_reachable, &lp_reachable);

  if (dump_file)
    {
      fprintf (dump_file, "Before removal of unreachable regions:\n");
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "Reachable regions: ");
      dump_bitmap_file (dump_file, r_reachable);
      fprintf (dump_file, "Reachable landing pads: ");
      dump_bitmap_file (dump_file, lp_reachable);
    }

  if (dump_file)
    FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
      if (region && !bitmap_bit_p (r_reachable, region->index))
	fprintf (dump_file,
		 "Removing unreachable region %d\n",
		 region->index);

  remove_unreachable_eh_regions (r_reachable);

  FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
    if (lp && !bitmap_bit_p (lp_reachable, lp->index))
      {
	if (dump_file)
	  fprintf (dump_file,
		   "Removing unreachable landing pad %d\n",
		   lp->index);
	remove_eh_landing_pad (lp);
      }

  if (dump_file)
    {
      fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "\n\n");
    }

  sbitmap_free (r_reachable);
  sbitmap_free (lp_reachable);

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif
}
/* Remove unreachable handlers if any landing pads have been removed after
   last ehcleanup pass (due to gimple_purge_dead_eh_edges).  */

void
maybe_remove_unreachable_handlers (void)
{
  eh_landing_pad lp;
  unsigned i;

  if (cfun->eh == NULL)
    return;

  FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
    if (lp && lp->post_landing_pad)
      {
	if (label_to_block (lp->post_landing_pad) == NULL)
	  {
	    remove_unreachable_handlers ();
	    return;
	  }
      }
}
/* Remove regions that do not have landing pads.  This assumes
   that remove_unreachable_handlers has already been run, and
   that we've just manipulated the landing pads since then.

   Preserve regions with landing pads and regions that prevent
   exceptions from propagating further, even if these regions
   are not reachable.  */

static void
remove_unreachable_handlers_no_lp (void)
{
  eh_region region;
  sbitmap r_reachable;
  unsigned i;

  mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);

  FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
    {
      if (region == NULL)
	continue;
      if (region->landing_pads != NULL
	  || region->type == ERT_MUST_NOT_THROW)
	bitmap_set_bit (r_reachable, region->index);
      if (dump_file
	  && !bitmap_bit_p (r_reachable, region->index))
	fprintf (dump_file,
		 "Removing unreachable region %d\n",
		 region->index);
    }

  remove_unreachable_eh_regions (r_reachable);

  sbitmap_free (r_reachable);
}
/* Undo critical edge splitting on an EH landing pad.  Earlier, we
   optimistically split all sorts of edges, including EH edges.  The
   optimization passes in between may not have needed them; if not,
   we should undo the split.

   Recognize this case by having one EH edge incoming to the BB and
   one normal edge outgoing; BB should be empty apart from the
   post_landing_pad label.

   Note that this is slightly different from the empty handler case
   handled by cleanup_empty_eh, in that the actual handler may yet
   have actual code but the landing pad has been separated from the
   handler.  As such, cleanup_empty_eh relies on this transformation
   having been done first.  */

static bool
unsplit_eh (eh_landing_pad lp)
{
  basic_block bb = label_to_block (lp->post_landing_pad);
  gimple_stmt_iterator gsi;
  edge e_in, e_out;

  /* Quickly check the edge counts on BB for singularity.  */
  if (!single_pred_p (bb) || !single_succ_p (bb))
    return false;
  e_in = single_pred_edge (bb);
  e_out = single_succ_edge (bb);

  /* Input edge must be EH and output edge must be normal.  */
  if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
    return false;

  /* The block must be empty except for the labels and debug insns.  */
  gsi = gsi_after_labels (bb);
  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);
  if (!gsi_end_p (gsi))
    return false;

  /* The destination block must not already have a landing pad
     for a different region.  */
  for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      tree lab;
      int lp_nr;

      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);
      lp_nr = EH_LANDING_PAD_NR (lab);
      if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
	return false;
    }

  /* The new destination block must not already be a destination of
     the source block, lest we merge fallthru and eh edges and get
     all sorts of confusion.  */
  if (find_edge (e_in->src, e_out->dest))
    return false;

  /* ??? We can get degenerate phis due to cfg cleanups.  I would have
     thought this should have been cleaned up by a phicprop pass, but
     that doesn't appear to handle virtuals.  Propagate by hand.  */
  if (!gimple_seq_empty_p (phi_nodes (bb)))
    {
      for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
	{
	  gimple use_stmt;
	  gphi *phi = gpi.phi ();
	  tree lhs = gimple_phi_result (phi);
	  tree rhs = gimple_phi_arg_def (phi, 0);
	  use_operand_p use_p;
	  imm_use_iterator iter;

	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
	    {
	      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		SET_USE (use_p, rhs);
	    }

	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;

	  remove_phi_node (&gpi, true);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
	     lp->index, e_out->dest->index);

  /* Redirect the edge.  Since redirect_eh_edge_1 expects to be moving
     a successor edge, humor it.  But do the real CFG change with the
     predecessor of E_OUT in order to preserve the ordering of arguments
     to the PHI nodes in E_OUT->DEST.  */
  redirect_eh_edge_1 (e_in, e_out->dest, false);
  redirect_edge_pred (e_out, e_in->src);
  e_out->flags = e_in->flags;
  e_out->probability = e_in->probability;
  e_out->count = e_in->count;
  remove_edge (e_in);

  return true;
}
/* Examine each landing pad block and see if it matches unsplit_eh.  */

static bool
unsplit_all_eh (void)
{
  bool changed = false;
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp)
      changed |= unsplit_eh (lp);

  return changed;
}
/* A subroutine of cleanup_empty_eh.  Redirect all EH edges incoming
   to OLD_BB to NEW_BB; return true on success, false on failure.

   OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
   PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
   Virtual PHIs may be deleted and marked for renaming.  */

static bool
cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
			     edge old_bb_out, bool change_region)
{
  gphi_iterator ngsi, ogsi;
  edge_iterator ei;
  edge e;
  bitmap ophi_handled;

  /* The destination block must not be a regular successor for any
     of the preds of the landing pad.  Thus, avoid turning an EH edge
     into a second edge to a block that is already a normal successor
     of the same predecessor, which CFG verification would choke on.
     See PR45172 and PR51089.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (find_edge (e->src, new_bb))
      return false;

  FOR_EACH_EDGE (e, ei, old_bb->preds)
    redirect_edge_var_map_clear (e);

  ophi_handled = BITMAP_ALLOC (NULL);

  /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
     for the edges we're going to move.  */
  for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
    {
      gphi *ophi, *nphi = ngsi.phi ();
      tree nresult, nop;

      nresult = gimple_phi_result (nphi);
      nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);

      /* Find the corresponding PHI in OLD_BB so we can forward-propagate
	 the source ssa_name.  */
      ophi = NULL;
      for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
	{
	  ophi = ogsi.phi ();
	  if (gimple_phi_result (ophi) == nop)
	    break;
	  ophi = NULL;
	}

      /* If we did find the corresponding PHI, copy those inputs.  */
      if (ophi)
	{
	  /* If NOP is used somewhere else beyond phis in new_bb, give up.  */
	  if (!has_single_use (nop))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
		{
		  if (!gimple_debug_bind_p (USE_STMT (use_p))
		      && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
			  || gimple_bb (USE_STMT (use_p)) != new_bb))
		    goto fail;
		}
	    }
	  bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
	  FOR_EACH_EDGE (e, ei, old_bb->preds)
	    {
	      location_t oloc;
	      tree oop;

	      if ((e->flags & EDGE_EH) == 0)
		continue;
	      oop = gimple_phi_arg_def (ophi, e->dest_idx);
	      oloc = gimple_phi_arg_location (ophi, e->dest_idx);
	      redirect_edge_var_map_add (e, nresult, oop, oloc);
	    }
	}
      /* If we didn't find the PHI, if it's a real variable or a VOP, we know
	 from the fact that OLD_BB is tree_empty_eh_handler_p that the
	 variable is unchanged from input to the block and we can simply
	 re-use the input to NEW_BB from the OLD_BB_OUT edge.  */
      else
	{
	  location_t nloc
	    = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
	  FOR_EACH_EDGE (e, ei, old_bb->preds)
	    redirect_edge_var_map_add (e, nresult, nop, nloc);
	}
    }

  /* Second, verify that all PHIs from OLD_BB have been handled.  If not,
     we don't know what values from the other edges into NEW_BB to use.  */
  for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
    {
      gphi *ophi = ogsi.phi ();
      tree oresult = gimple_phi_result (ophi);
      if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
	goto fail;
    }

  /* Finally, move the edges and update the PHIs.  */
  for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_EH)
      {
	/* ??? CFG manipulation routines do not try to update loop
	   form on edge redirection.  Do so manually here for now.  */
	/* If we redirect a loop entry or latch edge, that will either create
	   a multiple entry loop or rotate the loop.  If the loops merge
	   we may have created a loop with multiple latches.
	   All of this isn't easily fixed thus cancel the affected loop
	   and mark the other loop as possibly having multiple latches.  */
	if (e->dest == e->dest->loop_father->header)
	  {
	    mark_loop_for_removal (e->dest->loop_father);
	    new_bb->loop_father->latch = NULL;
	    loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
	  }
	redirect_eh_edge_1 (e, new_bb, change_region);
	redirect_edge_succ (e, new_bb);
	flush_pending_stmts (e);
      }
    else
      ei_next (&ei);

  BITMAP_FREE (ophi_handled);
  return true;

 fail:
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    redirect_edge_var_map_clear (e);
  BITMAP_FREE (ophi_handled);
  return false;
}
/* A subroutine of cleanup_empty_eh.  Move a landing pad LP from its
   old region to NEW_REGION at BB.  */

static void
cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
			  eh_landing_pad lp, eh_region new_region)
{
  gimple_stmt_iterator gsi;
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  lp->region = new_region;
  lp->next_lp = new_region->landing_pads;
  new_region->landing_pads = lp;

  /* Delete the RESX that was matched within the empty handler block.  */
  gsi = gsi_last_bb (bb);
  unlink_stmt_vdef (gsi_stmt (gsi));
  gsi_remove (&gsi, true);

  /* Clean up E_OUT for the fallthru.  */
  e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
  e_out->probability = REG_BR_PROB_BASE;
}
/* A subroutine of cleanup_empty_eh.  Handle more complex cases of
   unsplitting than unsplit_eh was prepared to handle, e.g. when
   multiple incoming edges and phis are involved.  */

static bool
cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
{
  gimple_stmt_iterator gsi;
  tree lab;

  /* We really ought not have totally lost everything following
     a landing pad label.  Given that BB is empty, there had better
     be a successor.  */
  gcc_assert (e_out != NULL);

  /* The destination block must not already have a landing pad
     for a different region.  */
  lab = NULL;
  for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      int lp_nr;

      if (!stmt)
	break;
      lab = gimple_label_label (stmt);
      lp_nr = EH_LANDING_PAD_NR (lab);
      if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
	return false;
    }

  /* Attempt to move the PHIs into the successor block.  */
  if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Unsplit EH landing pad %d to block %i "
		 "(via cleanup_empty_eh).\n",
		 lp->index, e_out->dest->index);
      return true;
    }

  return false;
}
/* Return true if edge E_FIRST is part of an empty infinite loop
   or leads to such a loop through a series of single successor
   empty blocks.  */

static bool
infinite_empty_loop_p (edge e_first)
{
  bool inf_loop = false;
  edge e;

  if (e_first->dest == e_first->src)
    return true;

  e_first->src->aux = (void *) 1;
  for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
    {
      gimple_stmt_iterator gsi;
      if (e->dest->aux)
	{
	  inf_loop = true;
	  break;
	}
      e->dest->aux = (void *) 1;
      gsi = gsi_after_labels (e->dest);
      if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
	break;
    }
  e_first->src->aux = NULL;
  for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
    e->dest->aux = NULL;

  return inf_loop;
}
/* Examine the block associated with LP to determine if it's an empty
   handler for its EH region.  If so, attempt to redirect EH edges to
   an outer region.  Return true if the CFG was updated in any way.  This
   is similar to jump forwarding, just across EH edges.  */

static bool
cleanup_empty_eh (eh_landing_pad lp)
{
  basic_block bb = label_to_block (lp->post_landing_pad);
  gimple_stmt_iterator gsi;
  gimple resx;
  eh_region new_region;
  edge_iterator ei;
  edge e, e_out;
  bool has_non_eh_pred;
  bool ret = false;
  int new_lp_nr;

  /* There can be zero or one edges out of BB.  This is the quickest test.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      e_out = NULL;
      break;
    case 1:
      e_out = single_succ_edge (bb);
      break;
    default:
      return false;
    }

  resx = last_stmt (bb);
  if (resx && is_gimple_resx (resx))
    {
      if (stmt_can_throw_external (resx))
	optimize_clobbers (bb);
      else if (sink_clobbers (bb))
	ret = true;
    }

  gsi = gsi_after_labels (bb);

  /* Make sure to skip debug statements.  */
  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);

  /* If the block is totally empty, look for more unsplitting cases.  */
  if (gsi_end_p (gsi))
    {
      /* For the degenerate case of an infinite loop bail out.
	 If bb has no successors and is totally empty, which can happen e.g.
	 because of incorrect noreturn attribute, bail out too.  */
      if (e_out == NULL
	  || infinite_empty_loop_p (e_out))
	return ret;

      return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
    }

  /* The block should consist only of a single RESX statement, modulo a
     preceding call to __builtin_stack_restore if there is no outgoing
     edge, since the call can be eliminated in this case.  */
  resx = gsi_stmt (gsi);
  if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
    {
      gsi_next (&gsi);
      resx = gsi_stmt (gsi);
    }
  if (!is_gimple_resx (resx))
    return ret;
  gcc_assert (gsi_one_before_end_p (gsi));

  /* Determine if there are non-EH edges, or resx edges into the handler.  */
  has_non_eh_pred = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!(e->flags & EDGE_EH))
      has_non_eh_pred = true;

  /* Find the handler that's outer of the empty handler by looking at
     where the RESX instruction was vectored.  */
  new_lp_nr = lookup_stmt_eh_lp (resx);
  new_region = get_eh_region_from_lp_number (new_lp_nr);

  /* If there's no destination region within the current function,
     redirection is trivial via removing the throwing statements from
     the EH region, removing the EH edges, and allowing the block
     to go unreachable.  */
  if (new_region == NULL)
    {
      gcc_assert (e_out == NULL);
      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
	if (e->flags & EDGE_EH)
	  {
	    gimple stmt = last_stmt (e->src);
	    remove_stmt_from_eh_lp (stmt);
	    remove_edge (e);
	  }
	else
	  ei_next (&ei);
      goto succeed;
    }

  /* If the destination region is a MUST_NOT_THROW, allow the runtime
     to handle the abort and allow the blocks to go unreachable.  */
  if (new_region->type == ERT_MUST_NOT_THROW)
    {
      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
	if (e->flags & EDGE_EH)
	  {
	    gimple stmt = last_stmt (e->src);
	    remove_stmt_from_eh_lp (stmt);
	    add_stmt_to_eh_lp (stmt, new_lp_nr);
	    remove_edge (e);
	  }
	else
	  ei_next (&ei);
      goto succeed;
    }

  /* Try to redirect the EH edges and merge the PHIs into the destination
     landing pad block.  If the merge succeeds, we'll already have redirected
     all the EH edges.  The handler itself will go unreachable if there were
     no normal edges.  */
  if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
    goto succeed;

  /* Finally, if all input edges are EH edges, then we can (potentially)
     reduce the number of transfers from the runtime by moving the landing
     pad from the original region to the new region.  This is a win when
     we remove the last CLEANUP region along a particular exception
     propagation path.  Since nothing changes except for the region with
     which the landing pad is associated, the PHI nodes do not need to be
     adjusted at all.  */
  if (!has_non_eh_pred)
    {
      cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
		 lp->index, new_region->index);

      /* ??? The CFG didn't change, but we may have rendered the
	 old EH region unreachable.  Trigger a cleanup there.  */
      return true;
    }

  return ret;

 succeed:
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
  remove_eh_landing_pad (lp);
  return true;
}
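/* Editor's note (a sketch, not part of the original sources): the net
   effect of cleanup_empty_eh is EH jump forwarding.  An empty
   post-landing-pad block whose only statement is "resx M" vectored to an
   outer landing pad has its incoming EH edges retargeted directly at
   that outer pad (or at the runtime, for the MUST_NOT_THROW and external
   cases), leaving the empty handler block unreachable.  */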
/* Do a post-order traversal of the EH region tree.  Examine each
   post_landing_pad block and see if we can eliminate it as empty.  */

static bool
cleanup_all_empty_eh (void)
{
  bool changed = false;
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp)
      changed |= cleanup_empty_eh (lp);

  return changed;
}

/* Perform cleanups and lowering of exception handling:
    1) cleanup regions with handlers doing nothing are optimized out
    2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
    3) info about which regions contain instructions, and which regions
       are reachable via local EH edges, is collected
    4) the EH tree is pruned of regions that are no longer necessary.

   TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
	 Unify those that have the same failure decl and locus.  */
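
/* Illustrative example (added commentary; an assumption for exposition):
   after inlining, a function such as

       struct S { ~S () { } };
       void f () { S s; g (); }

   carries a CLEANUP region whose handler does nothing once ~S is inlined
   away; step 1 above removes it, and a MUST_NOT_THROW region that existed
   only to guard that cleanup becomes dead and is removed by step 2.  */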

static unsigned int
execute_cleanup_eh_1 (void)
{
  /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
     looking up unreachable landing pads.  */
  remove_unreachable_handlers ();

  /* Watch out for the region tree vanishing due to all unreachable.  */
  if (cfun->eh->region_tree)
    {
      bool changed = false;

      if (optimize)
	changed |= unsplit_all_eh ();
      changed |= cleanup_all_empty_eh ();

      if (changed)
	{
	  free_dominance_info (CDI_DOMINATORS);
	  free_dominance_info (CDI_POST_DOMINATORS);

	  /* We delayed all basic block deletion, as we may have performed
	     cleanups on EH edges while non-EH edges were still present.  */
	  delete_unreachable_blocks ();

	  /* We manipulated the landing pads.  Remove any region that no
	     longer has a landing pad.  */
	  remove_unreachable_handlers_no_lp ();

	  return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
	}
    }

  return 0;
}

namespace {

const pass_data pass_data_cleanup_eh =
{
  GIMPLE_PASS, /* type */
  "ehcleanup", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cleanup_eh : public gimple_opt_pass
{
public:
  pass_cleanup_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }
  virtual bool gate (function *fun)
    {
      return fun->eh != NULL && fun->eh->region_tree != NULL;
    }

  virtual unsigned int execute (function *);

}; // class pass_cleanup_eh

unsigned int
pass_cleanup_eh::execute (function *fun)
{
  int ret = execute_cleanup_eh_1 ();

  /* If the function no longer needs an EH personality routine
     clear it.  This exposes cross-language inlining opportunities
     and avoids references to a never defined personality routine.  */
  if (DECL_FUNCTION_PERSONALITY (current_function_decl)
      && function_needs_eh_personality (fun) != eh_personality_lang)
    DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;

  return ret;
}
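
/* Added commentary (the named symbol is an example): a function whose EH
   regions were all cleaned up no longer references a personality routine
   such as the C++ __gxx_personality_v0, and clearing the decl lets it be
   inlined into functions that use a different personality.  */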

} // anon namespace

gimple_opt_pass *
make_pass_cleanup_eh (gcc::context *ctxt)
{
  return new pass_cleanup_eh (ctxt);
}

/* Verify that the BB containing STMT as its last statement has precisely
   the edge that make_eh_edges would create.  */

DEBUG_FUNCTION bool
verify_eh_edges (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  eh_landing_pad lp = NULL;
  int lp_nr;
  edge_iterator ei;
  edge e, eh_edge;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr > 0)
    lp = get_eh_landing_pad_from_number (lp_nr);

  eh_edge = NULL;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->flags & EDGE_EH)
	{
	  if (eh_edge)
	    {
	      error ("BB %i has multiple EH edges", bb->index);
	      return true;
	    }
	  else
	    eh_edge = e;
	}
    }

  if (lp == NULL)
    {
      if (eh_edge)
	{
	  error ("BB %i can not throw but has an EH edge", bb->index);
	  return true;
	}
      return false;
    }

  if (!stmt_could_throw_p (stmt))
    {
      error ("BB %i last statement has incorrectly set lp", bb->index);
      return true;
    }

  if (eh_edge == NULL)
    {
      error ("BB %i is missing an EH edge", bb->index);
      return true;
    }

  if (eh_edge->dest != label_to_block (lp->post_landing_pad))
    {
      error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
      return true;
    }

  return false;
}

/* Similarly, but handle GIMPLE_EH_DISPATCH specifically.  */

DEBUG_FUNCTION bool
verify_eh_dispatch_edge (geh_dispatch *stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;
  bool want_fallthru = true;
  edge_iterator ei;
  edge e, fall_edge;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  FOR_EACH_EDGE (e, ei, src->succs)
    gcc_assert (e->aux == NULL);

  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  dst = label_to_block (c->label);
	  e = find_edge (src, dst);
	  if (e == NULL)
	    {
	      error ("BB %i is missing an edge", src->index);
	      return true;
	    }
	  e->aux = (void *)e;

	  /* A catch-all handler doesn't have a fallthru.  */
	  if (c->type_list == NULL)
	    {
	      want_fallthru = false;
	      break;
	    }
	}
      break;
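
      /* Added commentary on the type_list test above: a NULL type_list
	 marks a catch-all handler, e.g. C++ `catch (...)'; no later
	 handler in the chain and no fallthru edge can be reached past
	 it, so the scan of the catch chain stops early.  */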

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (r->u.allowed.label);
      e = find_edge (src, dst);
      if (e == NULL)
	{
	  error ("BB %i is missing an edge", src->index);
	  return true;
	}
      e->aux = (void *)e;
      break;

    default:
      gcc_unreachable ();
    }

  fall_edge = NULL;
  FOR_EACH_EDGE (e, ei, src->succs)
    {
      if (e->flags & EDGE_FALLTHRU)
	{
	  if (fall_edge != NULL)
	    {
	      error ("BB %i too many fallthru edges", src->index);
	      return true;
	    }
	  fall_edge = e;
	}
      else if (e->aux)
	e->aux = NULL;
      else
	{
	  error ("BB %i has incorrect edge", src->index);
	  return true;
	}
    }

  if ((fall_edge != NULL) ^ want_fallthru)
    {
      error ("BB %i has incorrect fallthru edge", src->index);
      return true;
    }

  return false;
}