/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "plugin-api.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "gimple-low.h"
/* In some instances a tree and a gimple need to be stored in the same table,
   i.e. in hash tables.  This is a structure to do this.  */
typedef union {tree *tp; tree t; gimple g;} treemple;
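
/* For illustration (a sketch, not code used elsewhere in this file): the
   same treemple slot can carry either interpretation, and the surrounding
   code must remember which member it stored:

     treemple key;
     key.t = label_decl;   // key an entry by a LABEL_DECL tree
     key.g = try_stmt;     // or by a GIMPLE statement, in the same slot

   Only one member is meaningful at a time; there is no tag field.  */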
/* Misc functions used in this file.  */

/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
/* Add statement T in function IFUN to landing pad NUM.  */

static void
add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
{
  gcc_assert (num != 0);

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, hash_map<gimple, int>::create_ggc (31));

  gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num));
}
/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

static void
add_stmt_to_eh_lp (gimple t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}
/* Add statement T to the single EH landing pad in REGION.  */

static void
record_stmt_eh_region (eh_region region, gimple t)
{
  if (region == NULL)
    return;
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
	lp = gen_eh_landing_pad (region);
      else
	gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}
/* Remove statement T in function IFUN from its EH landing pad.  */

bool
remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
{
  if (!get_eh_throw_stmt_table (ifun))
    return false;

  if (!get_eh_throw_stmt_table (ifun)->get (t))
    return false;

  get_eh_throw_stmt_table (ifun)->remove (t);
  return true;
}
/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}
/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */

int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
{
  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  int *lp_nr = ifun->eh->throw_stmt_table->get (t);
  return lp_nr ? *lp_nr : 0;
}
/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}
/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gtry *parent;
};

/* Hashtable helpers.  */

struct finally_tree_hasher : typed_free_remove <finally_tree_node>
{
  typedef finally_tree_node *value_type;
  typedef finally_tree_node *compare_type;
  static inline hashval_t hash (const finally_tree_node *);
  static inline bool equal (const finally_tree_node *,
			    const finally_tree_node *);
};
hashval_t
finally_tree_hasher::hash (const finally_tree_node *v)
{
  return (intptr_t)v->child.t >> 4;
}

bool
finally_tree_hasher::equal (const finally_tree_node *v,
			    const finally_tree_node *c)
{
  return v->child.t == c->child.t;
}
/* Note that this table is *not* marked GTY.  It is short-lived.  */
static hash_table<finally_tree_hasher> *finally_tree;

static void
record_in_finally_tree (treemple child, gtry *parent)
{
  struct finally_tree_node *n;
  finally_tree_node **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = finally_tree->find_slot (n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
static void
collect_finally_tree (gimple stmt, gtry *region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gtry *region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}
static void
collect_finally_tree (gimple stmt, gtry *region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (as_a <glabel *> (stmt));
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	{
	  temp.g = stmt;
	  record_in_finally_tree (temp, region);
	  collect_finally_tree_1 (gimple_try_eval (stmt),
				  as_a <gtry *> (stmt));
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	{
	  collect_finally_tree_1 (gimple_try_eval (stmt), region);
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (
				as_a <gcatch *> (stmt)),
			      region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
	collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = finally_tree->find (&n);
      if (!p)
	return true;
      start.g = p->parent;
    }
  while (start.g != target);

  return false;
}
/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straight-forward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;

/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */
struct goto_queue_node
{
  treemple stmt;
  location_t location;
  gimple_seq repl_stmt;
  gimple cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  bool is_label;
};
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gtry *try_finally_expr;
  gtry *top_p;

  /* While lowering a top_p usually it is expanded into multiple statements,
     thus we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  hash_map<gimple, goto_queue_node *> *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  vec<tree> dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);
static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
	if (tf->goto_queue[i].stmt.g == stmt.g)
	  return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = new hash_map<gimple, goto_queue_node *>;
      for (i = 0; i < tf->goto_queue_active; i++)
	{
	  bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g,
						  &tf->goto_queue[i]);
	  gcc_assert (!existed);
	}
    }

  goto_queue_node **slot = tf->goto_queue_map->get (stmt.g);
  if (slot != NULL)
    return ((*slot)->repl_stmt);

  return NULL;
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
		      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
	{
	  gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
	  gsi_remove (gsi, false);
	  return;
	}
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
				      as_a <gcatch *> (stmt)),
				    tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
      break;
    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
				      tf);
	replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
				      tf);
      }
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}
/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (*seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}
/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (&eh_seq, tf);
}
/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple return statement.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
                      treemple new_stmt,
                      int index,
                      bool is_label,
                      location_t location)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->location = location;
  q->is_label = is_label;
}
/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
			    location_t location)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array.exists ())
    {
      tf->dest_array.create (10);
      tf->dest_array.quick_push (label);
      index = 0;
    }
  else
    {
      int n = tf->dest_array.length ();
      for (index = 0; index < n; ++index)
	if (tf->dest_array[index] == label)
	  break;
      if (index == n)
	tf->dest_array.safe_push (label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have an easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true, location);
}
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
	record_in_goto_queue_label (tf, new_stmt,
				    gimple_cond_true_label (cond_stmt),
				    EXPR_LOCATION (*new_stmt.tp));
	new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
	record_in_goto_queue_label (tf, new_stmt,
				    gimple_cond_false_label (cond_stmt),
				    EXPR_LOCATION (*new_stmt.tp));
      }
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
				  gimple_location (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
      break;

    default:
      gcc_unreachable ();
    }
}
#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state,
			     gswitch *switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

	int x;
	int foo (void)
	{
	  x = 0;
	  try {
	    return x;
	  } finally {
	    x++;
	  }
	}

     should return 0, not 1.  We don't have to do anything to make
     this happen because the return value has been placed in the
     RESULT_DECL already.  */

  q->cont_stmt = q->stmt.g;

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
		     struct leh_tf_state *tf)
{
  gimple x;

  gcc_assert (q->is_label);

  q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Emit a standard landing pad sequence into SEQ for REGION.  */

static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
  eh_landing_pad lp = region->landing_pads;
  gimple x;

  if (lp == NULL)
    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
}
/* Emit a RESX statement into SEQ for REGION.  */

static void
emit_resx (gimple_seq *seq, eh_region region)
{
  gresx *x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  if (region->outer)
    record_stmt_eh_region (region->outer, x);
}
/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
  geh_dispatch *x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);
}
/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      if (region->type == ERT_MUST_NOT_THROW)
	break;
      region = region->outer;
      if (region == NULL)
	break;
    }
}
/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

static inline bool
eh_region_may_contain_throw (eh_region r)
{
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
}
/* We want to transform
	try { body; } catch { stuff; }

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gtry *tp, eh_region region, tree over)
{
  gimple x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
	over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_set_location (x, loc);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
			     location_t loc)
{
  gtry *region = NULL;
  gimple_seq new_seq;
  gimple_stmt_iterator gsi;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
	{
	  tree block = gimple_block (stmt);
	  gimple_set_location (stmt, loc);
	  gimple_set_block (stmt, block);
	}
    }

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
	{
	  temp.t = label;
	  record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
	}
    }
  return label;
}
/* A subroutine of lower_try_finally.  If FINALLY consists of a
   GIMPLE_EH_ELSE node, return it.  */

static inline geh_else *
get_eh_else (gimple_seq finally)
{
  gimple x = gimple_seq_first_stmt (finally);
  if (gimple_code (x) == GIMPLE_EH_ELSE)
    {
      gcc_assert (gimple_seq_singleton_p (finally));
      return as_a <geh_else *> (x);
    }
  return NULL;
}
/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x;
  geh_mnt *eh_mnt;
  gtry *try_stmt;
  geh_else *eh_else;

  /* First check for nothing to do.  */
  if (lang_hooks.eh_protect_cleanup_actions == NULL)
    return;
  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)
    return;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  If we've got an EH_ELSE, extract it now.  */
  if (eh_else)
    {
      finally = gimple_eh_else_e_body (eh_else);
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
    }
  else if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state,
	gimple_location (tf->try_finally_expr));
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  eh_mnt = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  try_stmt = gimple_build_try (finally, gimple_seq_alloc_with_stmt (eh_mnt),
			       GIMPLE_TRY_CATCH);
  finally = lower_eh_must_not_throw (outer_state, try_stmt);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
			      struct leh_tf_state *tf)
{
  tree lab;
  gimple x;
  geh_else *eh_else;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
	{
	  finally = gimple_eh_else_e_body (eh_else);
	  lower_eh_constructs_1 (state, &finally);

	  emit_post_landing_pad (&eh_seq, tf->region);
	  gimple_seq_add_seq (&eh_seq, finally);
	}
    }
  else
    {
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
	{
	  emit_post_landing_pad (&eh_seq, tf->region);

	  x = gimple_build_goto (lab);
	  gimple_set_location (x, gimple_location (tf->try_finally_expr));
	  gimple_seq_add_stmt (&eh_seq, x);
	}
    }
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  geh_else *eh_else;
  glabel *label_stmt;
  gimple x;
  gimple_seq finally;
  gimple_stmt_iterator gsi;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      if (tf->may_throw)
	finally = gimple_eh_else_e_body (eh_else);
      else
	finally = gimple_eh_else_n_body (eh_else);
    }

  lower_eh_constructs_1 (state, &finally);

  for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
	{
	  tree block = gimple_block (stmt);
	  gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
	  gimple_set_block (stmt, block);
	}
    }

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
	 the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  label_stmt = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (tf->dest_array[0] == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}
1246 /* A subroutine of lower_try_finally. There are multiple edges incoming
1247 and outgoing from the finally block. Implement this by duplicating the
1248 finally block for every destination. */
1251 lower_try_finally_copy (struct leh_state
*state
, struct leh_tf_state
*tf
)
1254 gimple_seq new_stmt
;
1259 location_t tf_loc
= gimple_location (tf
->try_finally_expr
);
1261 finally
= gimple_try_cleanup (tf
->top_p
);
1263 /* Notice EH_ELSE, and simplify some of the remaining code
1264 by considering FINALLY to be the normal return path only. */
1265 eh_else
= get_eh_else (finally
);
1267 finally
= gimple_eh_else_n_body (eh_else
);
1269 tf
->top_p_seq
= gimple_try_eval (tf
->top_p
);
1272 if (tf
->may_fallthru
)
1274 seq
= lower_try_finally_dup_block (finally
, state
, tf_loc
);
1275 lower_eh_constructs_1 (state
, &seq
);
1276 gimple_seq_add_seq (&new_stmt
, seq
);
1278 tmp
= lower_try_finally_fallthru_label (tf
);
1279 x
= gimple_build_goto (tmp
);
1280 gimple_set_location (x
, tf_loc
);
1281 gimple_seq_add_stmt (&new_stmt
, x
);
1286 /* We don't need to copy the EH path of EH_ELSE,
1287 since it is only emitted once. */
1289 seq
= gimple_eh_else_e_body (eh_else
);
1291 seq
= lower_try_finally_dup_block (finally
, state
, tf_loc
);
1292 lower_eh_constructs_1 (state
, &seq
);
1294 emit_post_landing_pad (&eh_seq
, tf
->region
);
1295 gimple_seq_add_seq (&eh_seq
, seq
);
1296 emit_resx (&eh_seq
, tf
->region
);
1301 struct goto_queue_node
*q
, *qe
;
1302 int return_index
, index
;
1305 struct goto_queue_node
*q
;
1309 return_index
= tf
->dest_array
.length ();
1310 labels
= XCNEWVEC (struct labels_s
, return_index
+ 1);
1313 qe
= q
+ tf
->goto_queue_active
;
1316 index
= q
->index
< 0 ? return_index
: q
->index
;
1318 if (!labels
[index
].q
)
1319 labels
[index
].q
= q
;
1322 for (index
= 0; index
< return_index
+ 1; index
++)
1326 q
= labels
[index
].q
;
1330 lab
= labels
[index
].label
1331 = create_artificial_label (tf_loc
);
1333 if (index
== return_index
)
1334 do_return_redirection (q
, lab
, NULL
);
1336 do_goto_redirection (q
, lab
, NULL
, tf
);
1338 x
= gimple_build_label (lab
);
1339 gimple_seq_add_stmt (&new_stmt
, x
);
1341 seq
= lower_try_finally_dup_block (finally
, state
, q
->location
);
1342 lower_eh_constructs_1 (state
, &seq
);
1343 gimple_seq_add_seq (&new_stmt
, seq
);
1345 gimple_seq_add_stmt (&new_stmt
, q
->cont_stmt
);
1346 maybe_record_in_goto_queue (state
, q
->cont_stmt
);
1349 for (q
= tf
->goto_queue
; q
< qe
; q
++)
1353 index
= q
->index
< 0 ? return_index
: q
->index
;
1355 if (labels
[index
].q
== q
)
1358 lab
= labels
[index
].label
;
1360 if (index
== return_index
)
1361 do_return_redirection (q
, lab
, NULL
);
1363 do_goto_redirection (q
, lab
, NULL
, tf
);
1366 replace_goto_queue (tf
);
1370 /* Need to link new stmts after running replace_goto_queue due
1371 to not wanting to process the same goto stmts twice. */
1372 gimple_seq_add_seq (&tf
->top_p_seq
, new_stmt
);
1375 /* A subroutine of lower_try_finally. There are multiple edges incoming
1376 and outgoing from the finally block. Implement this by instrumenting
1377 each incoming edge and creating a switch statement at the end of the
1378 finally block that branches to the appropriate destination. */
1381 lower_try_finally_switch (struct leh_state
*state
, struct leh_tf_state
*tf
)
1383 struct goto_queue_node
*q
, *qe
;
1384 tree finally_tmp
, finally_label
;
1385 int return_index
, eh_index
, fallthru_index
;
1386 int nlabels
, ndests
, j
, last_case_index
;
1388 vec
<tree
> case_label_vec
;
1389 gimple_seq switch_body
= NULL
;
1395 hash_map
<tree
, gimple
> *cont_map
= NULL
;
1396 /* The location of the TRY_FINALLY stmt. */
1397 location_t tf_loc
= gimple_location (tf
->try_finally_expr
);
1398 /* The location of the finally block. */
1399 location_t finally_loc
;
1401 finally
= gimple_try_cleanup (tf
->top_p
);
1402 eh_else
= get_eh_else (finally
);
1404 /* Mash the TRY block to the head of the chain. */
1405 tf
->top_p_seq
= gimple_try_eval (tf
->top_p
);
1407 /* The location of the finally is either the last stmt in the finally
1408 block or the location of the TRY_FINALLY itself. */
1409 x
= gimple_seq_last_stmt (finally
);
1410 finally_loc
= x
? gimple_location (x
) : tf_loc
;
1412 /* Prepare for switch statement generation. */
1413 nlabels
= tf
->dest_array
.length ();
1414 return_index
= nlabels
;
1415 eh_index
= return_index
+ tf
->may_return
;
1416 fallthru_index
= eh_index
+ (tf
->may_throw
&& !eh_else
);
1417 ndests
= fallthru_index
+ tf
->may_fallthru
;
1419 finally_tmp
= create_tmp_var (integer_type_node
, "finally_tmp");
1420 finally_label
= create_artificial_label (finally_loc
);
  /* We use vec::quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as necessary.  */
1425 case_label_vec
.create (ndests
);
1427 last_case_index
= 0;
1429 /* Begin inserting code for getting to the finally block. Things
1430 are done in this order to correspond to the sequence the code is
1433 if (tf
->may_fallthru
)
1435 x
= gimple_build_assign (finally_tmp
,
1436 build_int_cst (integer_type_node
,
1438 gimple_seq_add_stmt (&tf
->top_p_seq
, x
);
1440 tmp
= build_int_cst (integer_type_node
, fallthru_index
);
1441 last_case
= build_case_label (tmp
, NULL
,
1442 create_artificial_label (tf_loc
));
1443 case_label_vec
.quick_push (last_case
);
1446 x
= gimple_build_label (CASE_LABEL (last_case
));
1447 gimple_seq_add_stmt (&switch_body
, x
);
1449 tmp
= lower_try_finally_fallthru_label (tf
);
1450 x
= gimple_build_goto (tmp
);
1451 gimple_set_location (x
, tf_loc
);
1452 gimple_seq_add_stmt (&switch_body
, x
);
1455 /* For EH_ELSE, emit the exception path (plus resx) now, then
1456 subsequently we only need consider the normal path. */
1461 finally
= gimple_eh_else_e_body (eh_else
);
1462 lower_eh_constructs_1 (state
, &finally
);
1464 emit_post_landing_pad (&eh_seq
, tf
->region
);
1465 gimple_seq_add_seq (&eh_seq
, finally
);
1466 emit_resx (&eh_seq
, tf
->region
);
1469 finally
= gimple_eh_else_n_body (eh_else
);
1471 else if (tf
->may_throw
)
1473 emit_post_landing_pad (&eh_seq
, tf
->region
);
1475 x
= gimple_build_assign (finally_tmp
,
1476 build_int_cst (integer_type_node
, eh_index
));
1477 gimple_seq_add_stmt (&eh_seq
, x
);
1479 x
= gimple_build_goto (finally_label
);
1480 gimple_set_location (x
, tf_loc
);
1481 gimple_seq_add_stmt (&eh_seq
, x
);
1483 tmp
= build_int_cst (integer_type_node
, eh_index
);
1484 last_case
= build_case_label (tmp
, NULL
,
1485 create_artificial_label (tf_loc
));
1486 case_label_vec
.quick_push (last_case
);
1489 x
= gimple_build_label (CASE_LABEL (last_case
));
1490 gimple_seq_add_stmt (&eh_seq
, x
);
1491 emit_resx (&eh_seq
, tf
->region
);
1494 x
= gimple_build_label (finally_label
);
1495 gimple_seq_add_stmt (&tf
->top_p_seq
, x
);
1497 lower_eh_constructs_1 (state
, &finally
);
1498 gimple_seq_add_seq (&tf
->top_p_seq
, finally
);
1500 /* Redirect each incoming goto edge. */
1502 qe
= q
+ tf
->goto_queue_active
;
1503 j
= last_case_index
+ tf
->may_return
;
1504 /* Prepare the assignments to finally_tmp that are executed upon the
1505 entrance through a particular edge. */
1508 gimple_seq mod
= NULL
;
1510 unsigned int case_index
;
1514 x
= gimple_build_assign (finally_tmp
,
1515 build_int_cst (integer_type_node
,
1517 gimple_seq_add_stmt (&mod
, x
);
1518 do_return_redirection (q
, finally_label
, mod
);
1519 switch_id
= return_index
;
1523 x
= gimple_build_assign (finally_tmp
,
1524 build_int_cst (integer_type_node
, q
->index
));
1525 gimple_seq_add_stmt (&mod
, x
);
1526 do_goto_redirection (q
, finally_label
, mod
, tf
);
1527 switch_id
= q
->index
;
1530 case_index
= j
+ q
->index
;
1531 if (case_label_vec
.length () <= case_index
|| !case_label_vec
[case_index
])
1534 tmp
= build_int_cst (integer_type_node
, switch_id
);
1535 case_lab
= build_case_label (tmp
, NULL
,
1536 create_artificial_label (tf_loc
));
1537 /* We store the cont_stmt in the pointer map, so that we can recover
1538 it in the loop below. */
1540 cont_map
= new hash_map
<tree
, gimple
>;
1541 cont_map
->put (case_lab
, q
->cont_stmt
);
1542 case_label_vec
.quick_push (case_lab
);
1545 for (j
= last_case_index
; j
< last_case_index
+ nlabels
; j
++)
1549 last_case
= case_label_vec
[j
];
1551 gcc_assert (last_case
);
1552 gcc_assert (cont_map
);
1554 cont_stmt
= *cont_map
->get (last_case
);
1556 x
= gimple_build_label (CASE_LABEL (last_case
));
1557 gimple_seq_add_stmt (&switch_body
, x
);
1558 gimple_seq_add_stmt (&switch_body
, cont_stmt
);
1559 maybe_record_in_goto_queue (state
, cont_stmt
);
1564 replace_goto_queue (tf
);
1566 /* Make sure that the last case is the default label, as one is required.
1567 Then sort the labels, which is also required in GIMPLE. */
1568 CASE_LOW (last_case
) = NULL
;
1569 tree tem
= case_label_vec
.pop ();
1570 gcc_assert (tem
== last_case
);
1571 sort_case_labels (case_label_vec
);
1573 /* Build the switch statement, setting last_case to be the default
1575 switch_stmt
= gimple_build_switch (finally_tmp
, last_case
,
1577 gimple_set_location (switch_stmt
, finally_loc
);
1579 /* Need to link SWITCH_STMT after running replace_goto_queue
1580 due to not wanting to process the same goto stmts twice. */
1581 gimple_seq_add_stmt (&tf
->top_p_seq
, switch_stmt
);
1582 gimple_seq_add_seq (&tf
->top_p_seq
, switch_body
);
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
{
  int f_estimate, sw_estimate;
  geh_else *eh_else;

  /* If there's an EH_ELSE involved, the exception path is separate
     and really doesn't come into play for this computation.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      ndests -= may_throw;
      finally = gimple_eh_else_n_body (eh_else);
    }

  if (!optimize)
    {
      gimple_stmt_iterator gsi;

      if (ndests == 1)
	return true;

      for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
	    return false;
	}
      return true;
    }

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */

static bool
cleanup_is_dead_in (eh_region reg)
{
  while (reg && reg->type == ERT_CLEANUP)
    reg = reg->outer;
  return (reg && reg->type == ERT_MUST_NOT_THROW);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */
1669 lower_try_finally (struct leh_state
*state
, gtry
*tp
)
1671 struct leh_tf_state this_tf
;
1672 struct leh_state this_state
;
1674 gimple_seq old_eh_seq
;
1676 /* Process the try block. */
1678 memset (&this_tf
, 0, sizeof (this_tf
));
1679 this_tf
.try_finally_expr
= tp
;
1681 this_tf
.outer
= state
;
1682 if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state
->cur_region
))
1684 this_tf
.region
= gen_eh_region_cleanup (state
->cur_region
);
1685 this_state
.cur_region
= this_tf
.region
;
1689 this_tf
.region
= NULL
;
1690 this_state
.cur_region
= state
->cur_region
;
1693 this_state
.ehp_region
= state
->ehp_region
;
1694 this_state
.tf
= &this_tf
;
1696 old_eh_seq
= eh_seq
;
1699 lower_eh_constructs_1 (&this_state
, gimple_try_eval_ptr (tp
));
1701 /* Determine if the try block is escaped through the bottom. */
1702 this_tf
.may_fallthru
= gimple_seq_may_fallthru (gimple_try_eval (tp
));
1704 /* Determine if any exceptions are possible within the try block. */
1706 this_tf
.may_throw
= eh_region_may_contain_throw (this_tf
.region
);
1707 if (this_tf
.may_throw
)
1708 honor_protect_cleanup_actions (state
, &this_state
, &this_tf
);
1710 /* Determine how many edges (still) reach the finally block. Or rather,
1711 how many destinations are reached by the finally block. Use this to
1712 determine how we process the finally block itself. */
1714 ndests
= this_tf
.dest_array
.length ();
1715 ndests
+= this_tf
.may_fallthru
;
1716 ndests
+= this_tf
.may_return
;
1717 ndests
+= this_tf
.may_throw
;
1719 /* If the FINALLY block is not reachable, dike it out. */
1722 gimple_seq_add_seq (&this_tf
.top_p_seq
, gimple_try_eval (tp
));
1723 gimple_try_set_cleanup (tp
, NULL
);
1725 /* If the finally block doesn't fall through, then any destination
1726 we might try to impose there isn't reached either. There may be
1727 some minor amount of cleanup and redirection still needed. */
1728 else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp
)))
1729 lower_try_finally_nofallthru (state
, &this_tf
);
1731 /* We can easily special-case redirection to a single destination. */
1732 else if (ndests
== 1)
1733 lower_try_finally_onedest (state
, &this_tf
);
1734 else if (decide_copy_try_finally (ndests
, this_tf
.may_throw
,
1735 gimple_try_cleanup (tp
)))
1736 lower_try_finally_copy (state
, &this_tf
);
1738 lower_try_finally_switch (state
, &this_tf
);
1740 /* If someone requested we add a label at the end of the transformed
1742 if (this_tf
.fallthru_label
)
1744 /* This must be reached only if ndests == 0. */
1745 gimple x
= gimple_build_label (this_tf
.fallthru_label
);
1746 gimple_seq_add_stmt (&this_tf
.top_p_seq
, x
);
1749 this_tf
.dest_array
.release ();
1750 free (this_tf
.goto_queue
);
1751 if (this_tf
.goto_queue_map
)
1752 delete this_tf
.goto_queue_map
;
1754 /* If there was an old (aka outer) eh_seq, append the current eh_seq.
1755 If there was no old eh_seq, then the append is trivially already done. */
1759 eh_seq
= old_eh_seq
;
1762 gimple_seq new_eh_seq
= eh_seq
;
1763 eh_seq
= old_eh_seq
;
1764 gimple_seq_add_seq (&eh_seq
, new_eh_seq
);
1768 return this_tf
.top_p_seq
;
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */
1776 lower_catch (struct leh_state
*state
, gtry
*tp
)
1778 eh_region try_region
= NULL
;
1779 struct leh_state this_state
= *state
;
1780 gimple_stmt_iterator gsi
;
1782 gimple_seq new_seq
, cleanup
;
1784 location_t try_catch_loc
= gimple_location (tp
);
1786 if (flag_exceptions
)
1788 try_region
= gen_eh_region_try (state
->cur_region
);
1789 this_state
.cur_region
= try_region
;
1792 lower_eh_constructs_1 (&this_state
, gimple_try_eval_ptr (tp
));
1794 if (!eh_region_may_contain_throw (try_region
))
1795 return gimple_try_eval (tp
);
1798 emit_eh_dispatch (&new_seq
, try_region
);
1799 emit_resx (&new_seq
, try_region
);
1801 this_state
.cur_region
= state
->cur_region
;
1802 this_state
.ehp_region
= try_region
;
1804 /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
1805 itself, so that e.g. for coverage purposes the nested cleanups don't
1806 appear before the cleanup body. See PR64634 for details. */
1807 gimple_seq old_eh_seq
= eh_seq
;
1811 cleanup
= gimple_try_cleanup (tp
);
1812 for (gsi
= gsi_start (cleanup
);
1820 catch_stmt
= as_a
<gcatch
*> (gsi_stmt (gsi
));
1821 c
= gen_eh_region_catch (try_region
, gimple_catch_types (catch_stmt
));
1823 handler
= gimple_catch_handler (catch_stmt
);
1824 lower_eh_constructs_1 (&this_state
, &handler
);
1826 c
->label
= create_artificial_label (UNKNOWN_LOCATION
);
1827 x
= gimple_build_label (c
->label
);
1828 gimple_seq_add_stmt (&new_seq
, x
);
1830 gimple_seq_add_seq (&new_seq
, handler
);
1832 if (gimple_seq_may_fallthru (new_seq
))
1835 out_label
= create_artificial_label (try_catch_loc
);
1837 x
= gimple_build_goto (out_label
);
1838 gimple_seq_add_stmt (&new_seq
, x
);
1844 gimple_try_set_cleanup (tp
, new_seq
);
1846 gimple_seq new_eh_seq
= eh_seq
;
1847 eh_seq
= old_eh_seq
;
1848 gimple_seq ret_seq
= frob_into_branch_around (tp
, try_region
, out_label
);
1849 gimple_seq_add_seq (&eh_seq
, new_eh_seq
);
1853 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a
1854 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
1855 region trees that record all the magic. */
1858 lower_eh_filter (struct leh_state
*state
, gtry
*tp
)
1860 struct leh_state this_state
= *state
;
1861 eh_region this_region
= NULL
;
1865 inner
= gimple_seq_first_stmt (gimple_try_cleanup (tp
));
1867 if (flag_exceptions
)
1869 this_region
= gen_eh_region_allowed (state
->cur_region
,
1870 gimple_eh_filter_types (inner
));
1871 this_state
.cur_region
= this_region
;
1874 lower_eh_constructs_1 (&this_state
, gimple_try_eval_ptr (tp
));
1876 if (!eh_region_may_contain_throw (this_region
))
1877 return gimple_try_eval (tp
);
1880 this_state
.cur_region
= state
->cur_region
;
1881 this_state
.ehp_region
= this_region
;
1883 emit_eh_dispatch (&new_seq
, this_region
);
1884 emit_resx (&new_seq
, this_region
);
1886 this_region
->u
.allowed
.label
= create_artificial_label (UNKNOWN_LOCATION
);
1887 x
= gimple_build_label (this_region
->u
.allowed
.label
);
1888 gimple_seq_add_stmt (&new_seq
, x
);
1890 lower_eh_constructs_1 (&this_state
, gimple_eh_filter_failure_ptr (inner
));
1891 gimple_seq_add_seq (&new_seq
, gimple_eh_filter_failure (inner
));
1893 gimple_try_set_cleanup (tp
, new_seq
);
1895 return frob_into_branch_around (tp
, this_region
, NULL
);
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;

  if (flag_exceptions)
    {
      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
	= gimple_eh_must_not_throw_fndecl (
	    as_a <geh_mnt *> (inner));
      this_region->u.must_not_throw.failure_loc
	= LOCATION_LOCUS (gimple_location (tp));

      /* In order to get mangling applied to this decl, we must mark it
	 used now.  Otherwise, pass_ipa_free_lang_data won't think it
	 needs to happen.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  return gimple_try_eval (tp);
}
1932 /* Implement a cleanup expression. This is similar to try-finally,
1933 except that we only execute the cleanup block for exception edges. */
1936 lower_cleanup (struct leh_state
*state
, gtry
*tp
)
1938 struct leh_state this_state
= *state
;
1939 eh_region this_region
= NULL
;
1940 struct leh_tf_state fake_tf
;
1942 bool cleanup_dead
= cleanup_is_dead_in (state
->cur_region
);
1944 if (flag_exceptions
&& !cleanup_dead
)
1946 this_region
= gen_eh_region_cleanup (state
->cur_region
);
1947 this_state
.cur_region
= this_region
;
1950 lower_eh_constructs_1 (&this_state
, gimple_try_eval_ptr (tp
));
1952 if (cleanup_dead
|| !eh_region_may_contain_throw (this_region
))
1953 return gimple_try_eval (tp
);
1955 /* Build enough of a try-finally state so that we can reuse
1956 honor_protect_cleanup_actions. */
1957 memset (&fake_tf
, 0, sizeof (fake_tf
));
1958 fake_tf
.top_p
= fake_tf
.try_finally_expr
= tp
;
1959 fake_tf
.outer
= state
;
1960 fake_tf
.region
= this_region
;
1961 fake_tf
.may_fallthru
= gimple_seq_may_fallthru (gimple_try_eval (tp
));
1962 fake_tf
.may_throw
= true;
1964 honor_protect_cleanup_actions (state
, NULL
, &fake_tf
);
1966 if (fake_tf
.may_throw
)
1968 /* In this case honor_protect_cleanup_actions had nothing to do,
1969 and we should process this normally. */
1970 lower_eh_constructs_1 (state
, gimple_try_cleanup_ptr (tp
));
1971 result
= frob_into_branch_around (tp
, this_region
,
1972 fake_tf
.fallthru_label
);
1976 /* In this case honor_protect_cleanup_actions did nearly all of
1977 the work. All we have left is to append the fallthru_label. */
1979 result
= gimple_try_eval (tp
);
1980 if (fake_tf
.fallthru_label
)
1982 gimple x
= gimple_build_label (fake_tf
.fallthru_label
);
1983 gimple_seq_add_stmt (&result
, x
);
/* Main loop for lowering eh constructs.  Also moves gsi to the next
   statement.  */

static void
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
1997 gimple stmt
= gsi_stmt (*gsi
);
1999 switch (gimple_code (stmt
))
2003 tree fndecl
= gimple_call_fndecl (stmt
);
2006 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
2007 switch (DECL_FUNCTION_CODE (fndecl
))
2009 case BUILT_IN_EH_POINTER
:
2010 /* The front end may have generated a call to
2011 __builtin_eh_pointer (0) within a catch region. Replace
2012 this zero argument with the current catch region number. */
2013 if (state
->ehp_region
)
2015 tree nr
= build_int_cst (integer_type_node
,
2016 state
->ehp_region
->index
);
2017 gimple_call_set_arg (stmt
, 0, nr
);
	    /* The user has done something silly.  Remove it.  */
2022 rhs
= null_pointer_node
;
2027 case BUILT_IN_EH_FILTER
:
2028 /* ??? This should never appear, but since it's a builtin it
2029 is accessible to abuse by users. Just remove it and
2030 replace the use with the arbitrary value zero. */
2031 rhs
= build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
2033 lhs
= gimple_call_lhs (stmt
);
2034 x
= gimple_build_assign (lhs
, rhs
);
2035 gsi_insert_before (gsi
, x
, GSI_SAME_STMT
);
2038 case BUILT_IN_EH_COPY_VALUES
:
2039 /* Likewise this should not appear. Remove it. */
2040 gsi_remove (gsi
, true);
2050 /* If the stmt can throw use a new temporary for the assignment
2051 to a LHS. This makes sure the old value of the LHS is
2052 available on the EH edge. Only do so for statements that
2053 potentially fall through (no noreturn calls e.g.), otherwise
2054 this new assignment might create fake fallthru regions. */
2055 if (stmt_could_throw_p (stmt
)
2056 && gimple_has_lhs (stmt
)
2057 && gimple_stmt_may_fallthru (stmt
)
2058 && !tree_could_throw_p (gimple_get_lhs (stmt
))
2059 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
2061 tree lhs
= gimple_get_lhs (stmt
);
2062 tree tmp
= create_tmp_var (TREE_TYPE (lhs
));
2063 gimple s
= gimple_build_assign (lhs
, tmp
);
2064 gimple_set_location (s
, gimple_location (stmt
));
2065 gimple_set_block (s
, gimple_block (stmt
));
2066 gimple_set_lhs (stmt
, tmp
);
2067 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
2068 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
2069 DECL_GIMPLE_REG_P (tmp
) = 1;
2070 gsi_insert_after (gsi
, s
, GSI_SAME_STMT
);
2072 /* Look for things that can throw exceptions, and record them. */
2073 if (state
->cur_region
&& stmt_could_throw_p (stmt
))
2075 record_stmt_eh_region (state
->cur_region
, stmt
);
2076 note_eh_region_may_contain_throw (state
->cur_region
);
2083 maybe_record_in_goto_queue (state
, stmt
);
2087 verify_norecord_switch_expr (state
, as_a
<gswitch
*> (stmt
));
2092 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
2093 if (gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
2094 replace
= lower_try_finally (state
, try_stmt
);
2097 x
= gimple_seq_first_stmt (gimple_try_cleanup (try_stmt
));
2100 replace
= gimple_try_eval (try_stmt
);
2101 lower_eh_constructs_1 (state
, &replace
);
2104 switch (gimple_code (x
))
2107 replace
= lower_catch (state
, try_stmt
);
2109 case GIMPLE_EH_FILTER
:
2110 replace
= lower_eh_filter (state
, try_stmt
);
2112 case GIMPLE_EH_MUST_NOT_THROW
:
2113 replace
= lower_eh_must_not_throw (state
, try_stmt
);
2115 case GIMPLE_EH_ELSE
:
2116 /* This code is only valid with GIMPLE_TRY_FINALLY. */
2119 replace
= lower_cleanup (state
, try_stmt
);
2125 /* Remove the old stmt and insert the transformed sequence
2127 gsi_insert_seq_before (gsi
, replace
, GSI_SAME_STMT
);
2128 gsi_remove (gsi
, true);
2130 /* Return since we don't want gsi_next () */
2133 case GIMPLE_EH_ELSE
:
2134 /* We should be eliminating this in lower_try_finally et al. */
2138 /* A type, a decl, or some kind of statement that we're not
2139 interested in. Don't walk them. */
/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */

static void
lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
    lower_eh_constructs_2 (state, &gsi);
}
namespace {

const pass_data pass_data_lower_eh =
{
  GIMPLE_PASS, /* type */
  "eh", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  PROP_gimple_leh, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_eh : public gimple_opt_pass
{
public:
  pass_lower_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_eh, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_lower_eh
unsigned int
pass_lower_eh::execute (function *fun)
{
  struct leh_state null_state;
  gimple_seq bodyp;

  bodyp = gimple_body (current_function_decl);
  if (bodyp == NULL)
    return 0;

  finally_tree = new hash_table<finally_tree_hasher> (31);
  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
  memset (&null_state, 0, sizeof (null_state));

  collect_finally_tree_1 (bodyp, NULL);
  lower_eh_constructs_1 (&null_state, &bodyp);
  gimple_set_body (current_function_decl, bodyp);

  /* We assume there's a return statement, or something, at the end of
     the function, and thus plopping the EH sequence afterward won't
     change anything.  */
  gcc_assert (!gimple_seq_may_fallthru (bodyp));
  gimple_seq_add_seq (&bodyp, eh_seq);

  /* We assume that since BODYP already existed, adding EH_SEQ to it
     didn't change its value, and we don't have to re-set the function.  */
  gcc_assert (bodyp == gimple_body (current_function_decl));

  delete finally_tree;
  finally_tree = NULL;
  BITMAP_FREE (eh_region_may_contain_throw_map);

  /* If this function needs a language specific EH personality routine
     and the frontend didn't already set one do so now.  */
  if (function_needs_eh_personality (fun) == eh_personality_lang
      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
    DECL_FUNCTION_PERSONALITY (current_function_decl)
      = lang_hooks.eh_personality ();

  return 0;
}

gimple_opt_pass *
make_pass_lower_eh (gcc::context *ctxt)
{
  return new pass_lower_eh (ctxt);
}
/* Create the multiple edges from an EH_DISPATCH statement to all of
   the possible handlers for its EH region.  Return true if there's
   no fallthru edge; false if there is.  */

bool
make_eh_dispatch_edges (geh_dispatch *stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  dst = label_to_block (c->label);
	  make_edge (src, dst, 0);

	  /* A catch-all handler doesn't have a fallthru.  */
	  if (c->type_list == NULL)
	    return false;
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (r->u.allowed.label);
      make_edge (src, dst, 0);
      break;

    default:
      gcc_unreachable ();
    }

  return true;
}
/* Create the single EH edge from STMT to its nearest landing pad,
   if there is such a landing pad within the current function.  */

void
make_eh_edges (gimple stmt)
{
  basic_block src, dst;
  eh_landing_pad lp;
  int lp_nr;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr <= 0)
    return;

  lp = get_eh_landing_pad_from_number (lp_nr);
  gcc_assert (lp != NULL);

  src = gimple_bb (stmt);
  dst = label_to_block (lp->post_landing_pad);
  make_edge (src, dst, EDGE_EH);
}
/* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
   do not actually perform the final edge redirection.

   CHANGE_REGION is true when we're being called from cleanup_empty_eh and
   we intend to change the destination EH region as well; this means
   EH_LANDING_PAD_NR must already be set on the destination block label.
   If false, we're being called from generic cfg manipulation code and we
   should preserve our place within the region tree.  */

static void
redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
{
  eh_landing_pad old_lp, new_lp;
  basic_block old_bb;
  gimple throw_stmt;
  int old_lp_nr, new_lp_nr;
  tree old_label, new_label;
  edge_iterator ei;
  edge e;

  old_bb = edge_in->dest;
  old_label = gimple_block_label (old_bb);
  old_lp_nr = EH_LANDING_PAD_NR (old_label);
  gcc_assert (old_lp_nr > 0);
  old_lp = get_eh_landing_pad_from_number (old_lp_nr);

  throw_stmt = last_stmt (edge_in->src);
  gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);

  new_label = gimple_block_label (new_bb);

  /* Look for an existing region that might be using NEW_BB already.  */
  new_lp_nr = EH_LANDING_PAD_NR (new_label);
  if (new_lp_nr)
    {
      new_lp = get_eh_landing_pad_from_number (new_lp_nr);
      gcc_assert (new_lp);

      /* Unless CHANGE_REGION is true, the new and old landing pad
	 had better be associated with the same EH region.  */
      gcc_assert (change_region || new_lp->region == old_lp->region);
    }
  else
    {
      new_lp = NULL;
      gcc_assert (!change_region);
    }

  /* Notice when we redirect the last EH edge away from OLD_BB.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (e != edge_in && (e->flags & EDGE_EH))
      break;

  if (new_lp)
    {
      /* NEW_LP already exists.  If there are still edges into OLD_LP,
	 there's nothing to do with the EH tree.  If there are no more
	 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
	 If CHANGE_REGION is true, then our caller is expecting to remove
	 the landing pad.  */
      if (e == NULL && !change_region)
	remove_eh_landing_pad (old_lp);
    }
  else
    {
      /* No correct landing pad exists.  If there are no more edges
	 into OLD_LP, then we can simply re-use the existing landing pad.
	 Otherwise, we have to create a new landing pad.  */
      if (e == NULL)
	{
	  EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
	  new_lp = old_lp;
	}
      else
	new_lp = gen_eh_landing_pad (old_lp->region);
      new_lp->post_landing_pad = new_label;
      EH_LANDING_PAD_NR (new_label) = new_lp->index;
    }

  /* Maybe move the throwing statement to the new region.  */
  if (old_lp != new_lp)
    {
      remove_stmt_from_eh_lp (throw_stmt);
      add_stmt_to_eh_lp (throw_stmt, new_lp->index);
    }
}
/* Redirect EH edge E to NEW_BB.  */

edge
redirect_eh_edge (edge edge_in, basic_block new_bb)
{
  redirect_eh_edge_1 (edge_in, new_bb, false);
  return ssa_redirect_edge (edge_in, new_bb);
}
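/* The EH-tree bookkeeping and the CFG change are deliberately split here:
   redirect_eh_edge_1 only adjusts landing pads and the throw-stmt table,
   while ssa_redirect_edge performs the actual edge redirection and records
   the PHI arguments of the old destination for later flushing.  */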
/* This is a subroutine of gimple_redirect_edge_and_branch.  Update the
   labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
   The actual edge update will happen in the caller.  */

void
redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
{
  tree new_lab = gimple_block_label (new_bb);
  bool any_changed = false;
  basic_block old_bb;
  eh_region r;
  eh_catch c;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));

  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  old_bb = label_to_block (c->label);
	  if (old_bb == e->dest)
	    {
	      c->label = new_lab;
	      any_changed = true;
	    }
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      old_bb = label_to_block (r->u.allowed.label);
      gcc_assert (old_bb == e->dest);
      r->u.allowed.label = new_lab;
      any_changed = true;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (any_changed);
}
/* Helper function for operation_could_trap_p and stmt_could_throw_p.  */

static bool
operation_could_trap_helper_p (enum tree_code op,
			       bool fp_operation,
			       bool honor_trapv,
			       bool honor_nans,
			       bool honor_snans,
			       tree divisor,
			       bool *handled)
{
  *handled = true;
  switch (op)
    {
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (honor_snans || honor_trapv)
	return true;
      if (fp_operation)
	return flag_trapping_math;
      if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
	return true;
      return false;

      /* Some floating point comparisons may trap.  */

    case UNORDERED_EXPR:

      /* These operations don't trap with floating point.  */

      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;

      /* Constructing an object cannot trap.  */

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;

      *handled = false;
      return false;
    }
}
/* Return true if operation OP may trap.  FP_OPERATION is true if OP is applied
   on floating-point values.  HONOR_TRAPV is true if OP is applied on integer
   type operands that may trap.  If OP is a division operator, DIVISOR contains
   the value of the divisor.  */

bool
operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
			tree divisor)
{
  bool honor_nans = (fp_operation && flag_trapping_math
		     && !flag_finite_math_only);
  bool honor_snans = fp_operation && flag_signaling_nans != 0;
  bool handled;

  if (TREE_CODE_CLASS (op) != tcc_comparison
      && TREE_CODE_CLASS (op) != tcc_unary
      && TREE_CODE_CLASS (op) != tcc_binary)
    return false;

  return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
					honor_nans, honor_snans, divisor,
					&handled);
}
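/* For example, an integral TRUNC_DIV_EXPR whose divisor is not a compile-time
   constant is reported as trapping (possible division by zero), whereas a
   floating-point PLUS_EXPR only counts as trapping when -ftrapping-math is in
   effect.  */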
/* Returns true if it is possible to prove that the index of
   an array access REF (an ARRAY_REF expression) falls into the
   array bounds.  */

static bool
in_array_bounds_p (tree ref)
{
  tree idx = TREE_OPERAND (ref, 1);
  tree min, max;

  if (TREE_CODE (idx) != INTEGER_CST)
    return false;

  min = array_ref_low_bound (ref);
  max = array_ref_up_bound (ref);
  if (!min
      || !max
      || TREE_CODE (min) != INTEGER_CST
      || TREE_CODE (max) != INTEGER_CST)
    return false;

  if (tree_int_cst_lt (idx, min)
      || tree_int_cst_lt (max, idx))
    return false;

  return true;
}
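/* E.g. for "int a[10]" the access a[3] has an INTEGER_CST index inside the
   domain [0, 9] and is provably in bounds, while a[i] with a variable index
   fails the INTEGER_CST check above and therefore is not provable.  */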
/* Returns true if it is possible to prove that the range of
   an array access REF (an ARRAY_RANGE_REF expression) falls
   into the array bounds.  */

static bool
range_in_array_bounds_p (tree ref)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
  tree range_min, range_max, min, max;

  range_min = TYPE_MIN_VALUE (domain_type);
  range_max = TYPE_MAX_VALUE (domain_type);
  if (!range_min
      || !range_max
      || TREE_CODE (range_min) != INTEGER_CST
      || TREE_CODE (range_max) != INTEGER_CST)
    return false;

  min = array_ref_low_bound (ref);
  max = array_ref_up_bound (ref);
  if (!min
      || !max
      || TREE_CODE (min) != INTEGER_CST
      || TREE_CODE (max) != INTEGER_CST)
    return false;

  if (tree_int_cst_lt (range_min, min)
      || tree_int_cst_lt (max, range_max))
    return false;

  return true;
}
/* Return true if EXPR can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base, div = NULL_TREE;

  if (!expr)
    return false;

  code = TREE_CODE (expr);
  t = TREE_TYPE (expr);

  if (t)
    {
      if (COMPARISON_CLASS_P (expr))
	fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
      else
	fp_operation = FLOAT_TYPE_P (t);
      honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary)
    div = TREE_OPERAND (expr, 1);
  if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
    return true;

 restart:
  switch (code)
    {
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !in_array_bounds_p (expr);

    case TARGET_MEM_REF:
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
	  && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      /* We cannot prove that the access is in-bounds when we have
	 variable-index TARGET_MEM_REFs.  */
      if (code == TARGET_MEM_REF
	  && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
	return true;
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	{
	  tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
	  offset_int off = mem_ref_offset (expr);
	  if (wi::neg_p (off, SIGNED))
	    return true;
	  if (TREE_CODE (base) == STRING_CST)
	    return wi::leu_p (TREE_STRING_LENGTH (base), off);
	  else if (DECL_SIZE_UNIT (base) == NULL_TREE
		   || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST
		   || wi::leu_p (wi::to_offset (DECL_SIZE_UNIT (base)), off))
	    return true;
	  /* Now we are sure the first byte of the access is inside
	     the base.  */
	  return false;
	}

    case INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t))
	return true;
      if (DECL_WEAK (t))
	return tree_could_trap_p (t);
      return false;

    case FUNCTION_DECL:
      /* Assume that accesses to weak functions may trap, unless we know
	 they are certainly defined in current TU or in some other
	 LTO partition.  */
      if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
	{
	  cgraph_node *node = cgraph_node::get (expr);
	  if (node)
	    node = node->function_symbol ();
	  return !(node && node->in_other_partition);
	}
      return false;

    case VAR_DECL:
      /* Assume that accesses to weak vars may trap, unless we know
	 they are certainly defined in current TU or in some other
	 LTO partition.  */
      if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
	{
	  varpool_node *node = varpool_node::get (expr);
	  if (node)
	    node = node->ultimate_alias_target ();
	  return !(node && node->in_other_partition);
	}
      return false;

    default:
      return false;
    }
}
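/* The ARRAY_REF and ARRAY_RANGE_REF cases above follow the same conservative
   pattern: an access counts as non-trapping only if its base cannot trap and
   either TREE_THIS_NOTRAP was set by an earlier pass or the access can be
   proven in bounds here; anything that cannot be proven is reported as
   potentially trapping.  */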
/* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be an
   assignment or a conditional) may throw.  */

static bool
stmt_could_throw_1_p (gimple stmt)
{
  enum tree_code code = gimple_expr_code (stmt);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t;
  size_t i;
  bool handled, ret;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      if (is_gimple_assign (stmt)
	  && TREE_CODE_CLASS (code) == tcc_comparison)
	t = TREE_TYPE (gimple_assign_rhs1 (stmt));
      else if (gimple_code (stmt) == GIMPLE_COND)
	t = TREE_TYPE (gimple_cond_lhs (stmt));
      else
	t = gimple_expr_type (stmt);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
	honor_trapv = true;
    }

  /* Check if the main expression may trap.  */
  t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
  ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
				       honor_nans, honor_snans, t,
				       &handled);
  if (handled)
    return ret;

  /* If the expression does not trap, see if any of the individual operands may
     trap.  */
  for (i = 0; i < gimple_num_ops (stmt); i++)
    if (tree_could_trap_p (gimple_op (stmt, i)))
      return true;

  return false;
}
/* Return true if statement STMT could throw an exception.  */

bool
stmt_could_throw_p (gimple stmt)
{
  if (!flag_exceptions)
    return false;

  /* The only statements that can throw an exception are assignments,
     conditionals, calls, resx, and asms.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RESX:
      return true;

    case GIMPLE_CALL:
      return !gimple_call_nothrow_p (as_a <gcall *> (stmt));

    case GIMPLE_ASSIGN:
    case GIMPLE_COND:
      if (!cfun->can_throw_non_call_exceptions)
	return false;
      return stmt_could_throw_1_p (stmt);

    case GIMPLE_ASM:
      if (!cfun->can_throw_non_call_exceptions)
	return false;
      return gimple_asm_volatile_p (as_a <gasm *> (stmt));

    default:
      return false;
    }
}
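/* Consequently, without -fnon-call-exceptions only calls and RESX statements
   are ever considered throwing; assignments, conditionals and asms can throw
   only when non-call exceptions are enabled (and an asm only if it is
   volatile).  */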
/* Return true if expression T could throw an exception.  */

bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (cfun->can_throw_non_call_exceptions
	  && tree_could_trap_p (TREE_OPERAND (t, 0)))
	return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (cfun->can_throw_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
/* Return true if STMT can throw an exception that is not caught within
   the current function (CFUN).  */

bool
stmt_can_throw_external (gimple stmt)
{
  int lp_nr;

  if (!stmt_could_throw_p (stmt))
    return false;

  lp_nr = lookup_stmt_eh_lp (stmt);
  return lp_nr == 0;
}

/* Return true if STMT can throw an exception that is caught within
   the current function (CFUN).  */

bool
stmt_can_throw_internal (gimple stmt)
{
  int lp_nr;

  if (!stmt_could_throw_p (stmt))
    return false;

  lp_nr = lookup_stmt_eh_lp (stmt);
  return lp_nr > 0;
}
/* Given a statement STMT in IFUN, if STMT can no longer throw, then
   remove any entry it might have from the EH table.  Return true if
   any change was made.  */

bool
maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
{
  if (stmt_could_throw_p (stmt))
    return false;
  return remove_stmt_from_eh_lp_fn (ifun, stmt);
}

/* Likewise, but always use the current function.  */

bool
maybe_clean_eh_stmt (gimple stmt)
{
  return maybe_clean_eh_stmt_fn (cfun, stmt);
}

/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
{
  int lp_nr = lookup_stmt_eh_lp (old_stmt);

  if (lp_nr != 0)
    {
      bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
	return false;

      remove_stmt_from_eh_lp (old_stmt);
      if (new_stmt_could_throw)
	{
	  add_stmt_to_eh_lp (new_stmt, lp_nr);
	  return false;
	}
      else
	return true;
    }

  return false;
}
/* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
   in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT.  The MAP
   operand is the return value of duplicate_eh_regions.  */

bool
maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
			    struct function *old_fun, gimple old_stmt,
			    hash_map<void *, void *> *map,
			    int default_lp_nr)
{
  int old_lp_nr, new_lp_nr;

  if (!stmt_could_throw_p (new_stmt))
    return false;

  old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
  if (old_lp_nr == 0)
    {
      if (default_lp_nr == 0)
	return false;
      new_lp_nr = default_lp_nr;
    }
  else if (old_lp_nr > 0)
    {
      eh_landing_pad old_lp, new_lp;

      old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
      new_lp = static_cast<eh_landing_pad> (*map->get (old_lp));
      new_lp_nr = new_lp->index;
    }
  else
    {
      eh_region old_r, new_r;

      old_r = (*old_fun->eh->region_array)[-old_lp_nr];
      new_r = static_cast<eh_region> (*map->get (old_r));
      new_lp_nr = -new_r->index;
    }

  add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
  return true;
}

/* Similar, but both OLD_STMT and NEW_STMT are within the current function,
   and thus no remapping is required.  */

bool
maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
{
  int lp_nr;

  if (!stmt_could_throw_p (new_stmt))
    return false;

  lp_nr = lookup_stmt_eh_lp (old_stmt);
  if (lp_nr == 0)
    return false;

  add_stmt_to_eh_lp (new_stmt, lp_nr);
  return true;
}
/* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
   GIMPLE_TRY) that are similar enough to be considered the same.  Currently
   this only handles handlers consisting of a single call, as that's the
   important case for C++: a destructor call for a particular object showing
   up in multiple handlers.  */

static bool
same_handler_p (gimple_seq oneh, gimple_seq twoh)
{
  gimple_stmt_iterator gsi;
  gimple ones, twos;
  unsigned int ai;

  gsi = gsi_start (oneh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  ones = gsi_stmt (gsi);

  gsi = gsi_start (twoh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  twos = gsi_stmt (gsi);

  if (!is_gimple_call (ones)
      || !is_gimple_call (twos)
      || gimple_call_lhs (ones)
      || gimple_call_lhs (twos)
      || gimple_call_chain (ones)
      || gimple_call_chain (twos)
      || !gimple_call_same_target_p (ones, twos)
      || gimple_call_num_args (ones) != gimple_call_num_args (twos))
    return false;

  for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
    if (!operand_equal_p (gimple_call_arg (ones, ai),
			  gimple_call_arg (twos, ai), 0))
      return false;

  return true;
}
/* Optimize
    try { A() } finally { try { ~B() } catch { ~A() } }
    try { ... } finally { ~A() }
   into
    try { A() } catch { ~B() }
    try { ~B() ... } finally { ~A() }

   This occurs frequently in C++, where A is a local variable and B is a
   temporary used in the initializer for A.  */

static void
optimize_double_finally (gtry *one, gtry *two)
{
  gimple oneh;
  gimple_stmt_iterator gsi;
  gimple_seq cleanup;

  cleanup = gimple_try_cleanup (one);
  gsi = gsi_start (cleanup);
  if (!gsi_one_before_end_p (gsi))
    return;

  oneh = gsi_stmt (gsi);
  if (gimple_code (oneh) != GIMPLE_TRY
      || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
    return;

  if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
    {
      gimple_seq seq = gimple_try_eval (oneh);

      gimple_try_set_cleanup (one, seq);
      gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
      seq = copy_gimple_seq_and_replace_locals (seq);
      gimple_seq_add_seq (&seq, gimple_try_eval (two));
      gimple_try_set_eval (two, seq);
    }
}
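/* After this transformation the cleanup sequence of ONEH is used twice: as
   the new cleanup of ONE and, through copy_gimple_seq_and_replace_locals, as
   a copy prepended to the eval of TWO; the copy ensures local declarations in
   that sequence are not shared between the two uses.  */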
/* Perform EH refactoring optimizations that are simpler to do when code
   flow has been lowered but EH structures haven't.  */

static void
refactor_eh_r (gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  gimple one, two;

  one = NULL;
  two = NULL;
  gsi = gsi_start (seq);
  while (1)
    {
      one = two;
      if (gsi_end_p (gsi))
	two = NULL;
      else
	two = gsi_stmt (gsi);
      if (one && two)
	if (gtry *try_one = dyn_cast <gtry *> (one))
	  if (gtry *try_two = dyn_cast <gtry *> (two))
	    if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
		&& gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
	      optimize_double_finally (try_one, try_two);
      if (one)
	switch (gimple_code (one))
	  {
	  case GIMPLE_TRY:
	    refactor_eh_r (gimple_try_eval (one));
	    refactor_eh_r (gimple_try_cleanup (one));
	    break;
	  case GIMPLE_CATCH:
	    refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
	    break;
	  case GIMPLE_EH_FILTER:
	    refactor_eh_r (gimple_eh_filter_failure (one));
	    break;
	  case GIMPLE_EH_ELSE:
	    {
	      geh_else *eh_else_stmt = as_a <geh_else *> (one);
	      refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
	      refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
	    }
	    break;
	  default:
	    break;
	  }
      if (two)
	gsi_next (&gsi);
      else
	break;
    }
}
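/* The walk keeps a window of two consecutive statements (ONE, TWO), so
   adjacent GIMPLE_TRY_FINALLY statements are handed to optimize_double_finally
   pairwise, and the bodies of TRY, CATCH, EH_FILTER and EH_ELSE statements are
   visited recursively.  */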
const pass_data pass_data_refactor_eh =
{
  GIMPLE_PASS, /* type */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_refactor_eh : public gimple_opt_pass
{
public:
  pass_refactor_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_refactor_eh, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_exceptions != 0; }
  virtual unsigned int execute (function *)
    {
      refactor_eh_r (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_refactor_eh

gimple_opt_pass *
make_pass_refactor_eh (gcc::context *ctxt)
{
  return new pass_refactor_eh (ctxt);
}
3172 /* At the end of gimple optimization, we can lower RESX. */
3175 lower_resx (basic_block bb
, gresx
*stmt
,
3176 hash_map
<eh_region
, tree
> *mnt_map
)
3179 eh_region src_r
, dst_r
;
3180 gimple_stmt_iterator gsi
;
3185 lp_nr
= lookup_stmt_eh_lp (stmt
);
3187 dst_r
= get_eh_region_from_lp_number (lp_nr
);
3191 src_r
= get_eh_region_from_number (gimple_resx_region (stmt
));
3192 gsi
= gsi_last_bb (bb
);
3196 /* We can wind up with no source region when pass_cleanup_eh shows
3197 that there are no entries into an eh region and deletes it, but
3198 then the block that contains the resx isn't removed. This can
3199 happen without optimization when the switch statement created by
3200 lower_try_finally_switch isn't simplified to remove the eh case.
3202 Resolve this by expanding the resx node to an abort. */
3204 fn
= builtin_decl_implicit (BUILT_IN_TRAP
);
3205 x
= gimple_build_call (fn
, 0);
3206 gsi_insert_before (&gsi
, x
, GSI_SAME_STMT
);
3208 while (EDGE_COUNT (bb
->succs
) > 0)
3209 remove_edge (EDGE_SUCC (bb
, 0));
3213 /* When we have a destination region, we resolve this by copying
3214 the excptr and filter values into place, and changing the edge
3215 to immediately after the landing pad. */
3223 /* We are resuming into a MUST_NOT_CALL region. Expand a call to
3224 the failure decl into a new block, if needed. */
3225 gcc_assert (dst_r
->type
== ERT_MUST_NOT_THROW
);
3227 tree
*slot
= mnt_map
->get (dst_r
);
3230 gimple_stmt_iterator gsi2
;
3232 new_bb
= create_empty_bb (bb
);
3233 add_bb_to_loop (new_bb
, bb
->loop_father
);
3234 lab
= gimple_block_label (new_bb
);
3235 gsi2
= gsi_start_bb (new_bb
);
3237 fn
= dst_r
->u
.must_not_throw
.failure_decl
;
3238 x
= gimple_build_call (fn
, 0);
3239 gimple_set_location (x
, dst_r
->u
.must_not_throw
.failure_loc
);
3240 gsi_insert_after (&gsi2
, x
, GSI_CONTINUE_LINKING
);
3242 mnt_map
->put (dst_r
, lab
);
3247 new_bb
= label_to_block (lab
);
3250 gcc_assert (EDGE_COUNT (bb
->succs
) == 0);
3251 e
= make_edge (bb
, new_bb
, EDGE_FALLTHRU
);
3252 e
->count
= bb
->count
;
3253 e
->probability
= REG_BR_PROB_BASE
;
3258 tree dst_nr
= build_int_cst (integer_type_node
, dst_r
->index
);
3260 fn
= builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES
);
3261 src_nr
= build_int_cst (integer_type_node
, src_r
->index
);
3262 x
= gimple_build_call (fn
, 2, dst_nr
, src_nr
);
3263 gsi_insert_before (&gsi
, x
, GSI_SAME_STMT
);
3265 /* Update the flags for the outgoing edge. */
3266 e
= single_succ_edge (bb
);
3267 gcc_assert (e
->flags
& EDGE_EH
);
3268 e
->flags
= (e
->flags
& ~EDGE_EH
) | EDGE_FALLTHRU
;
3270 /* If there are no more EH users of the landing pad, delete it. */
3271 FOR_EACH_EDGE (e
, ei
, e
->dest
->preds
)
3272 if (e
->flags
& EDGE_EH
)
3276 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
3277 remove_eh_landing_pad (lp
);
3287 /* When we don't have a destination region, this exception escapes
3288 up the call chain. We resolve this by generating a call to the
3289 _Unwind_Resume library function. */
3291 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3292 with no arguments for C++ and Java. Check for that. */
3293 if (src_r
->use_cxa_end_cleanup
)
3295 fn
= builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP
);
3296 x
= gimple_build_call (fn
, 0);
3297 gsi_insert_before (&gsi
, x
, GSI_SAME_STMT
);
3301 fn
= builtin_decl_implicit (BUILT_IN_EH_POINTER
);
3302 src_nr
= build_int_cst (integer_type_node
, src_r
->index
);
3303 x
= gimple_build_call (fn
, 1, src_nr
);
3304 var
= create_tmp_var (ptr_type_node
);
3305 var
= make_ssa_name (var
, x
);
3306 gimple_call_set_lhs (x
, var
);
3307 gsi_insert_before (&gsi
, x
, GSI_SAME_STMT
);
3309 fn
= builtin_decl_implicit (BUILT_IN_UNWIND_RESUME
);
3310 x
= gimple_build_call (fn
, 1, var
);
3311 gsi_insert_before (&gsi
, x
, GSI_SAME_STMT
);
3314 gcc_assert (EDGE_COUNT (bb
->succs
) == 0);
3317 gsi_remove (&gsi
, true);
const pass_data pass_data_lower_resx =
{
  GIMPLE_PASS, /* type */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_resx : public gimple_opt_pass
{
public:
  pass_lower_resx (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_resx, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_exceptions != 0; }
  virtual unsigned int execute (function *);

}; // class pass_lower_resx

unsigned int
pass_lower_resx::execute (function *fun)
{
  basic_block bb;
  bool dominance_invalidated = false;
  bool any_rewritten = false;

  hash_map<eh_region, tree> mnt_map;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple last = last_stmt (bb);
      if (last && is_gimple_resx (last))
	{
	  dominance_invalidated |=
	    lower_resx (bb, as_a <gresx *> (last), &mnt_map);
	  any_rewritten = true;
	}
    }

  if (dominance_invalidated)
    {
      free_dominance_info (CDI_DOMINATORS);
      free_dominance_info (CDI_POST_DOMINATORS);
    }

  return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
}

gimple_opt_pass *
make_pass_lower_resx (gcc::context *ctxt)
{
  return new pass_lower_resx (ctxt);
}
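/* Dominance information is freed above because lower_resx can change the
   CFG: it may create a new block for the MUST_NOT_THROW failure call and
   remove or retarget the outgoing edges of the block containing the RESX.  */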
/* Try to optimize var = {v} {CLOBBER} stmts followed just by
   external throw.  */

static void
optimize_clobbers (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  bool any_clobbers = false;
  bool seen_stack_restore = false;
  edge_iterator ei;
  edge e;

  /* Only optimize anything if the bb contains at least one clobber,
     ends with resx (checked by caller), optionally contains some
     debug stmts or labels, or at most one __builtin_stack_restore
     call, and has an incoming EH edge.  */
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;
      if (gimple_clobber_p (stmt))
	{
	  any_clobbers = true;
	  continue;
	}
      if (!seen_stack_restore
	  && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
	{
	  seen_stack_restore = true;
	  continue;
	}
      if (gimple_code (stmt) == GIMPLE_LABEL)
	break;
      return;
    }
  if (!any_clobbers)
    return;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_EH)
      break;
  if (e == NULL)
    return;
  gsi = gsi_last_bb (bb);
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (!gimple_clobber_p (stmt))
	continue;
      unlink_stmt_vdef (stmt);
      gsi_remove (&gsi, true);
      release_defs (stmt);
    }
}
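/* optimize_clobbers is only invoked for blocks whose trailing RESX throws
   externally (see pass_lower_eh_dispatch::execute and cleanup_empty_eh);
   when the exception is caught within the function, sink_clobbers below is
   used instead and the clobbers are moved to the EH successor block.  */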
3442 /* Try to sink var = {v} {CLOBBER} stmts followed just by
3443 internal throw to successor BB. */
3446 sink_clobbers (basic_block bb
)
3450 gimple_stmt_iterator gsi
, dgsi
;
3452 bool any_clobbers
= false;
3455 /* Only optimize if BB has a single EH successor and
3456 all predecessor edges are EH too. */
3457 if (!single_succ_p (bb
)
3458 || (single_succ_edge (bb
)->flags
& EDGE_EH
) == 0)
3461 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
3463 if ((e
->flags
& EDGE_EH
) == 0)
3467 /* And BB contains only CLOBBER stmts before the final
3469 gsi
= gsi_last_bb (bb
);
3470 for (gsi_prev (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
3472 gimple stmt
= gsi_stmt (gsi
);
3473 if (is_gimple_debug (stmt
))
3475 if (gimple_code (stmt
) == GIMPLE_LABEL
)
3477 if (!gimple_clobber_p (stmt
))
3479 any_clobbers
= true;
3484 edge succe
= single_succ_edge (bb
);
3485 succbb
= succe
->dest
;
3487 /* See if there is a virtual PHI node to take an updated virtual
3490 tree vuse
= NULL_TREE
;
3491 for (gphi_iterator gpi
= gsi_start_phis (succbb
);
3492 !gsi_end_p (gpi
); gsi_next (&gpi
))
3494 tree res
= gimple_phi_result (gpi
.phi ());
3495 if (virtual_operand_p (res
))
3503 dgsi
= gsi_after_labels (succbb
);
3504 gsi
= gsi_last_bb (bb
);
3505 for (gsi_prev (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
3507 gimple stmt
= gsi_stmt (gsi
);
3509 if (is_gimple_debug (stmt
))
3511 if (gimple_code (stmt
) == GIMPLE_LABEL
)
3513 lhs
= gimple_assign_lhs (stmt
);
3514 /* Unfortunately we don't have dominance info updated at this
3515 point, so checking if
3516 dominated_by_p (CDI_DOMINATORS, succbb,
3517 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0)))
3518 would be too costly. Thus, avoid sinking any clobbers that
3519 refer to non-(D) SSA_NAMEs. */
3520 if (TREE_CODE (lhs
) == MEM_REF
3521 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == SSA_NAME
3522 && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs
, 0)))
3524 unlink_stmt_vdef (stmt
);
3525 gsi_remove (&gsi
, true);
3526 release_defs (stmt
);
3530 /* As we do not change stmt order when sinking across a
3531 forwarder edge we can keep virtual operands in place. */
3532 gsi_remove (&gsi
, false);
3533 gsi_insert_before (&dgsi
, stmt
, GSI_NEW_STMT
);
3535 /* But adjust virtual operands if we sunk across a PHI node. */
3539 imm_use_iterator iter
;
3540 use_operand_p use_p
;
3541 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, vuse
)
3542 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
3543 SET_USE (use_p
, gimple_vdef (stmt
));
3544 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse
))
3546 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt
)) = 1;
3547 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse
) = 0;
3549 /* Adjust the incoming virtual operand. */
3550 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi
, succe
), gimple_vuse (stmt
));
3551 SET_USE (gimple_vuse_op (stmt
), vuse
);
3553 /* If there isn't a single predecessor but no virtual PHI node
3554 arrange for virtual operands to be renamed. */
3555 else if (gimple_vuse_op (stmt
) != NULL_USE_OPERAND_P
3556 && !single_pred_p (succbb
))
3558 /* In this case there will be no use of the VDEF of this stmt.
3559 ??? Unless this is a secondary opportunity and we have not
3560 removed unreachable blocks yet, so we cannot assert this.
3561 Which also means we will end up renaming too many times. */
3562 SET_USE (gimple_vuse_op (stmt
), gimple_vop (cfun
));
3563 mark_virtual_operands_for_renaming (cfun
);
3564 todo
|= TODO_update_ssa_only_virtuals
;
3571 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3572 we have found some duplicate labels and removed some edges. */
3575 lower_eh_dispatch (basic_block src
, geh_dispatch
*stmt
)
3577 gimple_stmt_iterator gsi
;
3582 bool redirected
= false;
3584 region_nr
= gimple_eh_dispatch_region (stmt
);
3585 r
= get_eh_region_from_number (region_nr
);
3587 gsi
= gsi_last_bb (src
);
3593 auto_vec
<tree
> labels
;
3594 tree default_label
= NULL
;
3598 hash_set
<tree
> seen_values
;
3600 /* Collect the labels for a switch. Zero the post_landing_pad
field because we'll no longer have anything keeping these labels
3602 in existence and the optimizer will be free to merge these
3604 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
3606 tree tp_node
, flt_node
, lab
= c
->label
;
3607 bool have_label
= false;
3610 tp_node
= c
->type_list
;
3611 flt_node
= c
->filter_list
;
3613 if (tp_node
== NULL
)
3615 default_label
= lab
;
3620 /* Filter out duplicate labels that arise when this handler
3621 is shadowed by an earlier one. When no labels are
3622 attached to the handler anymore, we remove
3623 the corresponding edge and then we delete unreachable
3624 blocks at the end of this pass. */
3625 if (! seen_values
.contains (TREE_VALUE (flt_node
)))
3627 tree t
= build_case_label (TREE_VALUE (flt_node
),
3629 labels
.safe_push (t
);
3630 seen_values
.add (TREE_VALUE (flt_node
));
3634 tp_node
= TREE_CHAIN (tp_node
);
3635 flt_node
= TREE_CHAIN (flt_node
);
3640 remove_edge (find_edge (src
, label_to_block (lab
)));
3645 /* Clean up the edge flags. */
3646 FOR_EACH_EDGE (e
, ei
, src
->succs
)
3648 if (e
->flags
& EDGE_FALLTHRU
)
3650 /* If there was no catch-all, use the fallthru edge. */
3651 if (default_label
== NULL
)
3652 default_label
= gimple_block_label (e
->dest
);
3653 e
->flags
&= ~EDGE_FALLTHRU
;
3656 gcc_assert (default_label
!= NULL
);
3658 /* Don't generate a switch if there's only a default case.
3659 This is common in the form of try { A; } catch (...) { B; }. */
3660 if (!labels
.exists ())
3662 e
= single_succ_edge (src
);
3663 e
->flags
|= EDGE_FALLTHRU
;
3667 fn
= builtin_decl_implicit (BUILT_IN_EH_FILTER
);
3668 x
= gimple_build_call (fn
, 1, build_int_cst (integer_type_node
,
3670 filter
= create_tmp_var (TREE_TYPE (TREE_TYPE (fn
)));
3671 filter
= make_ssa_name (filter
, x
);
3672 gimple_call_set_lhs (x
, filter
);
3673 gsi_insert_before (&gsi
, x
, GSI_SAME_STMT
);
3675 /* Turn the default label into a default case. */
3676 default_label
= build_case_label (NULL
, NULL
, default_label
);
3677 sort_case_labels (labels
);
3679 x
= gimple_build_switch (filter
, default_label
, labels
);
3680 gsi_insert_before (&gsi
, x
, GSI_SAME_STMT
);
3685 case ERT_ALLOWED_EXCEPTIONS
:
3687 edge b_e
= BRANCH_EDGE (src
);
3688 edge f_e
= FALLTHRU_EDGE (src
);
3690 fn
= builtin_decl_implicit (BUILT_IN_EH_FILTER
);
3691 x
= gimple_build_call (fn
, 1, build_int_cst (integer_type_node
,
3693 filter
= create_tmp_var (TREE_TYPE (TREE_TYPE (fn
)));
3694 filter
= make_ssa_name (filter
, x
);
3695 gimple_call_set_lhs (x
, filter
);
3696 gsi_insert_before (&gsi
, x
, GSI_SAME_STMT
);
3698 r
->u
.allowed
.label
= NULL
;
3699 x
= gimple_build_cond (EQ_EXPR
, filter
,
3700 build_int_cst (TREE_TYPE (filter
),
3701 r
->u
.allowed
.filter
),
3702 NULL_TREE
, NULL_TREE
);
3703 gsi_insert_before (&gsi
, x
, GSI_SAME_STMT
);
3705 b_e
->flags
= b_e
->flags
| EDGE_TRUE_VALUE
;
3706 f_e
->flags
= (f_e
->flags
& ~EDGE_FALLTHRU
) | EDGE_FALSE_VALUE
;
3714 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3715 gsi_remove (&gsi
, true);
const pass_data pass_data_lower_eh_dispatch =
{
  GIMPLE_PASS, /* type */
  "ehdisp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_eh_dispatch : public gimple_opt_pass
{
public:
  pass_lower_eh_dispatch (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; }
  virtual unsigned int execute (function *);

}; // class pass_lower_eh_dispatch

unsigned int
pass_lower_eh_dispatch::execute (function *fun)
{
  basic_block bb;
  int flags = 0;
  bool redirected = false;

  assign_filter_values ();

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple last = last_stmt (bb);
      if (last == NULL)
	continue;
      if (gimple_code (last) == GIMPLE_EH_DISPATCH)
	{
	  redirected |= lower_eh_dispatch (bb,
					   as_a <geh_dispatch *> (last));
	  flags |= TODO_update_ssa_only_virtuals;
	}
      else if (gimple_code (last) == GIMPLE_RESX)
	{
	  if (stmt_can_throw_external (last))
	    optimize_clobbers (bb);
	  else
	    flags |= sink_clobbers (bb);
	}
    }

  if (redirected)
    delete_unreachable_blocks ();
  return flags;
}

gimple_opt_pass *
make_pass_lower_eh_dispatch (gcc::context *ctxt)
{
  return new pass_lower_eh_dispatch (ctxt);
}
3789 /* Walk statements, see what regions and, optionally, landing pads
3790 are really referenced.
3792 Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
3793 and in LP_REACHABLE an sbitmap with bits set for reachable landing pads.
3795 Passing NULL for LP_REACHABLE is valid, in this case only reachable
3798 The caller is responsible for freeing the returned sbitmaps. */
3801 mark_reachable_handlers (sbitmap
*r_reachablep
, sbitmap
*lp_reachablep
)
3803 sbitmap r_reachable
, lp_reachable
;
3805 bool mark_landing_pads
= (lp_reachablep
!= NULL
);
3806 gcc_checking_assert (r_reachablep
!= NULL
);
3808 r_reachable
= sbitmap_alloc (cfun
->eh
->region_array
->length ());
3809 bitmap_clear (r_reachable
);
3810 *r_reachablep
= r_reachable
;
3812 if (mark_landing_pads
)
3814 lp_reachable
= sbitmap_alloc (cfun
->eh
->lp_array
->length ());
3815 bitmap_clear (lp_reachable
);
3816 *lp_reachablep
= lp_reachable
;
3819 lp_reachable
= NULL
;
3821 FOR_EACH_BB_FN (bb
, cfun
)
3823 gimple_stmt_iterator gsi
;
3825 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3827 gimple stmt
= gsi_stmt (gsi
);
3829 if (mark_landing_pads
)
3831 int lp_nr
= lookup_stmt_eh_lp (stmt
);
3833 /* Negative LP numbers are MUST_NOT_THROW regions which
3834 are not considered BB enders. */
3836 bitmap_set_bit (r_reachable
, -lp_nr
);
3838 /* Positive LP numbers are real landing pads, and BB enders. */
3841 gcc_assert (gsi_one_before_end_p (gsi
));
3842 eh_region region
= get_eh_region_from_lp_number (lp_nr
);
3843 bitmap_set_bit (r_reachable
, region
->index
);
3844 bitmap_set_bit (lp_reachable
, lp_nr
);
3848 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3849 switch (gimple_code (stmt
))
3852 bitmap_set_bit (r_reachable
,
3853 gimple_resx_region (as_a
<gresx
*> (stmt
)));
3855 case GIMPLE_EH_DISPATCH
:
3856 bitmap_set_bit (r_reachable
,
3857 gimple_eh_dispatch_region (
3858 as_a
<geh_dispatch
*> (stmt
)));
3861 if (gimple_call_builtin_p (stmt
, BUILT_IN_EH_COPY_VALUES
))
3862 for (int i
= 0; i
< 2; ++i
)
3864 tree rt
= gimple_call_arg (stmt
, i
);
3865 HOST_WIDE_INT ri
= tree_to_shwi (rt
);
gcc_assert (ri == (int)ri);
3868 bitmap_set_bit (r_reachable
, ri
);
3878 /* Remove unreachable handlers and unreachable landing pads. */
3881 remove_unreachable_handlers (void)
3883 sbitmap r_reachable
, lp_reachable
;
3888 mark_reachable_handlers (&r_reachable
, &lp_reachable
);
3892 fprintf (dump_file
, "Before removal of unreachable regions:\n");
3893 dump_eh_tree (dump_file
, cfun
);
3894 fprintf (dump_file
, "Reachable regions: ");
3895 dump_bitmap_file (dump_file
, r_reachable
);
3896 fprintf (dump_file
, "Reachable landing pads: ");
3897 dump_bitmap_file (dump_file
, lp_reachable
);
3902 FOR_EACH_VEC_SAFE_ELT (cfun
->eh
->region_array
, i
, region
)
3903 if (region
&& !bitmap_bit_p (r_reachable
, region
->index
))
3905 "Removing unreachable region %d\n",
3909 remove_unreachable_eh_regions (r_reachable
);
3911 FOR_EACH_VEC_SAFE_ELT (cfun
->eh
->lp_array
, i
, lp
)
3912 if (lp
&& !bitmap_bit_p (lp_reachable
, lp
->index
))
3916 "Removing unreachable landing pad %d\n",
3918 remove_eh_landing_pad (lp
);
3923 fprintf (dump_file
, "\n\nAfter removal of unreachable regions:\n");
3924 dump_eh_tree (dump_file
, cfun
);
3925 fprintf (dump_file
, "\n\n");
3928 sbitmap_free (r_reachable
);
3929 sbitmap_free (lp_reachable
);
3931 #ifdef ENABLE_CHECKING
3932 verify_eh_tree (cfun
);
/* Remove unreachable handlers if any landing pads have been removed after
   last ehcleanup pass (due to gimple_purge_dead_eh_edges).  */

void
maybe_remove_unreachable_handlers (void)
{
  eh_landing_pad lp;
  unsigned i;

  if (cfun->eh == NULL)
    return;

  FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
    if (lp && lp->post_landing_pad)
      {
	if (label_to_block (lp->post_landing_pad) == NULL)
	  {
	    remove_unreachable_handlers ();
	    return;
	  }
      }
}

/* Remove regions that do not have landing pads.  This assumes
   that remove_unreachable_handlers has already been run, and
   that we've just manipulated the landing pads since then.

   Preserve regions with landing pads and regions that prevent
   exceptions from propagating further, even if these regions
   are not reachable.  */

static void
remove_unreachable_handlers_no_lp (void)
{
  eh_region region;
  sbitmap r_reachable;
  unsigned i;

  mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);

  FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
    {
      if (region == NULL)
	continue;
      if (region->landing_pads != NULL
	  || region->type == ERT_MUST_NOT_THROW)
	bitmap_set_bit (r_reachable, region->index);
      if (dump_file
	  && !bitmap_bit_p (r_reachable, region->index))
	fprintf (dump_file,
		 "Removing unreachable region %d\n",
		 region->index);
    }

  remove_unreachable_eh_regions (r_reachable);

  sbitmap_free (r_reachable);
}
3997 /* Undo critical edge splitting on an EH landing pad. Earlier, we
optimistically split all sorts of edges, including EH edges.  The
3999 optimization passes in between may not have needed them; if not,
4000 we should undo the split.
4002 Recognize this case by having one EH edge incoming to the BB and
4003 one normal edge outgoing; BB should be empty apart from the
4004 post_landing_pad label.
4006 Note that this is slightly different from the empty handler case
4007 handled by cleanup_empty_eh, in that the actual handler may yet
4008 have actual code but the landing pad has been separated from the
4009 handler. As such, cleanup_empty_eh relies on this transformation
4010 having been done first. */
4013 unsplit_eh (eh_landing_pad lp
)
4015 basic_block bb
= label_to_block (lp
->post_landing_pad
);
4016 gimple_stmt_iterator gsi
;
4019 /* Quickly check the edge counts on BB for singularity. */
4020 if (!single_pred_p (bb
) || !single_succ_p (bb
))
4022 e_in
= single_pred_edge (bb
);
4023 e_out
= single_succ_edge (bb
);
4025 /* Input edge must be EH and output edge must be normal. */
4026 if ((e_in
->flags
& EDGE_EH
) == 0 || (e_out
->flags
& EDGE_EH
) != 0)
4029 /* The block must be empty except for the labels and debug insns. */
4030 gsi
= gsi_after_labels (bb
);
4031 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
4032 gsi_next_nondebug (&gsi
);
4033 if (!gsi_end_p (gsi
))
4036 /* The destination block must not already have a landing pad
4037 for a different region. */
4038 for (gsi
= gsi_start_bb (e_out
->dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4040 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
4046 lab
= gimple_label_label (label_stmt
);
4047 lp_nr
= EH_LANDING_PAD_NR (lab
);
4048 if (lp_nr
&& get_eh_region_from_lp_number (lp_nr
) != lp
->region
)
4052 /* The new destination block must not already be a destination of
4053 the source block, lest we merge fallthru and eh edges and get
4054 all sorts of confused. */
4055 if (find_edge (e_in
->src
, e_out
->dest
))
4058 /* ??? We can get degenerate phis due to cfg cleanups. I would have
4059 thought this should have been cleaned up by a phicprop pass, but
4060 that doesn't appear to handle virtuals. Propagate by hand. */
4061 if (!gimple_seq_empty_p (phi_nodes (bb
)))
4063 for (gphi_iterator gpi
= gsi_start_phis (bb
); !gsi_end_p (gpi
); )
4066 gphi
*phi
= gpi
.phi ();
4067 tree lhs
= gimple_phi_result (phi
);
4068 tree rhs
= gimple_phi_arg_def (phi
, 0);
4069 use_operand_p use_p
;
4070 imm_use_iterator iter
;
4072 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, lhs
)
4074 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
4075 SET_USE (use_p
, rhs
);
4078 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
4079 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs
) = 1;
4081 remove_phi_node (&gpi
, true);
4085 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4086 fprintf (dump_file
, "Unsplit EH landing pad %d to block %i.\n",
4087 lp
->index
, e_out
->dest
->index
);
4089 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
4090 a successor edge, humor it. But do the real CFG change with the
4091 predecessor of E_OUT in order to preserve the ordering of arguments
4092 to the PHI nodes in E_OUT->DEST. */
4093 redirect_eh_edge_1 (e_in
, e_out
->dest
, false);
4094 redirect_edge_pred (e_out
, e_in
->src
);
4095 e_out
->flags
= e_in
->flags
;
4096 e_out
->probability
= e_in
->probability
;
4097 e_out
->count
= e_in
->count
;
4103 /* Examine each landing pad block and see if it matches unsplit_eh. */
4106 unsplit_all_eh (void)
4108 bool changed
= false;
4112 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
4114 changed
|= unsplit_eh (lp
);
4119 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
4120 to OLD_BB to NEW_BB; return true on success, false on failure.
4122 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
4123 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
4124 Virtual PHIs may be deleted and marked for renaming. */
4127 cleanup_empty_eh_merge_phis (basic_block new_bb
, basic_block old_bb
,
4128 edge old_bb_out
, bool change_region
)
4130 gphi_iterator ngsi
, ogsi
;
4133 bitmap ophi_handled
;
4135 /* The destination block must not be a regular successor for any
4136 of the preds of the landing pad. Thus, avoid turning
4146 which CFG verification would choke on. See PR45172 and PR51089. */
4147 FOR_EACH_EDGE (e
, ei
, old_bb
->preds
)
4148 if (find_edge (e
->src
, new_bb
))
4151 FOR_EACH_EDGE (e
, ei
, old_bb
->preds
)
4152 redirect_edge_var_map_clear (e
);
4154 ophi_handled
= BITMAP_ALLOC (NULL
);
4156 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
4157 for the edges we're going to move. */
4158 for (ngsi
= gsi_start_phis (new_bb
); !gsi_end_p (ngsi
); gsi_next (&ngsi
))
4160 gphi
*ophi
, *nphi
= ngsi
.phi ();
4163 nresult
= gimple_phi_result (nphi
);
4164 nop
= gimple_phi_arg_def (nphi
, old_bb_out
->dest_idx
);
4166 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
4167 the source ssa_name. */
4169 for (ogsi
= gsi_start_phis (old_bb
); !gsi_end_p (ogsi
); gsi_next (&ogsi
))
4172 if (gimple_phi_result (ophi
) == nop
)
4177 /* If we did find the corresponding PHI, copy those inputs. */
4180 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
4181 if (!has_single_use (nop
))
4183 imm_use_iterator imm_iter
;
4184 use_operand_p use_p
;
4186 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, nop
)
4188 if (!gimple_debug_bind_p (USE_STMT (use_p
))
4189 && (gimple_code (USE_STMT (use_p
)) != GIMPLE_PHI
4190 || gimple_bb (USE_STMT (use_p
)) != new_bb
))
4194 bitmap_set_bit (ophi_handled
, SSA_NAME_VERSION (nop
));
4195 FOR_EACH_EDGE (e
, ei
, old_bb
->preds
)
4200 if ((e
->flags
& EDGE_EH
) == 0)
4202 oop
= gimple_phi_arg_def (ophi
, e
->dest_idx
);
4203 oloc
= gimple_phi_arg_location (ophi
, e
->dest_idx
);
4204 redirect_edge_var_map_add (e
, nresult
, oop
, oloc
);
4207 /* If we didn't find the PHI, if it's a real variable or a VOP, we know
4208 from the fact that OLD_BB is tree_empty_eh_handler_p that the
4209 variable is unchanged from input to the block and we can simply
4210 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
4214 = gimple_phi_arg_location (nphi
, old_bb_out
->dest_idx
);
4215 FOR_EACH_EDGE (e
, ei
, old_bb
->preds
)
4216 redirect_edge_var_map_add (e
, nresult
, nop
, nloc
);
4220 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
4221 we don't know what values from the other edges into NEW_BB to use. */
4222 for (ogsi
= gsi_start_phis (old_bb
); !gsi_end_p (ogsi
); gsi_next (&ogsi
))
4224 gphi
*ophi
= ogsi
.phi ();
4225 tree oresult
= gimple_phi_result (ophi
);
4226 if (!bitmap_bit_p (ophi_handled
, SSA_NAME_VERSION (oresult
)))
4230 /* Finally, move the edges and update the PHIs. */
4231 for (ei
= ei_start (old_bb
->preds
); (e
= ei_safe_edge (ei
)); )
4232 if (e
->flags
& EDGE_EH
)
/* ??? CFG manipulation routines do not try to update loop
4235 form on edge redirection. Do so manually here for now. */
4236 /* If we redirect a loop entry or latch edge that will either create
4237 a multiple entry loop or rotate the loop. If the loops merge
4238 we may have created a loop with multiple latches.
4239 All of this isn't easily fixed thus cancel the affected loop
4240 and mark the other loop as possibly having multiple latches. */
4241 if (e
->dest
== e
->dest
->loop_father
->header
)
4243 mark_loop_for_removal (e
->dest
->loop_father
);
4244 new_bb
->loop_father
->latch
= NULL
;
4245 loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES
);
4247 redirect_eh_edge_1 (e
, new_bb
, change_region
);
4248 redirect_edge_succ (e
, new_bb
);
4249 flush_pending_stmts (e
);
4254 BITMAP_FREE (ophi_handled
);
4258 FOR_EACH_EDGE (e
, ei
, old_bb
->preds
)
4259 redirect_edge_var_map_clear (e
);
4260 BITMAP_FREE (ophi_handled
);
4264 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
4265 old region to NEW_REGION at BB. */
4268 cleanup_empty_eh_move_lp (basic_block bb
, edge e_out
,
4269 eh_landing_pad lp
, eh_region new_region
)
4271 gimple_stmt_iterator gsi
;
4274 for (pp
= &lp
->region
->landing_pads
; *pp
!= lp
; pp
= &(*pp
)->next_lp
)
4278 lp
->region
= new_region
;
4279 lp
->next_lp
= new_region
->landing_pads
;
4280 new_region
->landing_pads
= lp
;
4282 /* Delete the RESX that was matched within the empty handler block. */
4283 gsi
= gsi_last_bb (bb
);
4284 unlink_stmt_vdef (gsi_stmt (gsi
));
4285 gsi_remove (&gsi
, true);
4287 /* Clean up E_OUT for the fallthru. */
4288 e_out
->flags
= (e_out
->flags
& ~EDGE_EH
) | EDGE_FALLTHRU
;
4289 e_out
->probability
= REG_BR_PROB_BASE
;
4292 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
4293 unsplitting than unsplit_eh was prepared to handle, e.g. when
4294 multiple incoming edges and phis are involved. */
4297 cleanup_empty_eh_unsplit (basic_block bb
, edge e_out
, eh_landing_pad lp
)
4299 gimple_stmt_iterator gsi
;
4302 /* We really ought not have totally lost everything following
4303 a landing pad label. Given that BB is empty, there had better
4305 gcc_assert (e_out
!= NULL
);
4307 /* The destination block must not already have a landing pad
4308 for a different region. */
4310 for (gsi
= gsi_start_bb (e_out
->dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4312 glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
4317 lab
= gimple_label_label (stmt
);
4318 lp_nr
= EH_LANDING_PAD_NR (lab
);
4319 if (lp_nr
&& get_eh_region_from_lp_number (lp_nr
) != lp
->region
)
4323 /* Attempt to move the PHIs into the successor block. */
4324 if (cleanup_empty_eh_merge_phis (e_out
->dest
, bb
, e_out
, false))
4326 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4328 "Unsplit EH landing pad %d to block %i "
4329 "(via cleanup_empty_eh).\n",
4330 lp
->index
, e_out
->dest
->index
);
4337 /* Return true if edge E_FIRST is part of an empty infinite loop
4338 or leads to such a loop through a series of single successor
4342 infinite_empty_loop_p (edge e_first
)
4344 bool inf_loop
= false;
4347 if (e_first
->dest
== e_first
->src
)
4350 e_first
->src
->aux
= (void *) 1;
4351 for (e
= e_first
; single_succ_p (e
->dest
); e
= single_succ_edge (e
->dest
))
4353 gimple_stmt_iterator gsi
;
4359 e
->dest
->aux
= (void *) 1;
4360 gsi
= gsi_after_labels (e
->dest
);
4361 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
4362 gsi_next_nondebug (&gsi
);
4363 if (!gsi_end_p (gsi
))
4366 e_first
->src
->aux
= NULL
;
4367 for (e
= e_first
; e
->dest
->aux
; e
= single_succ_edge (e
->dest
))
4368 e
->dest
->aux
= NULL
;
4373 /* Examine the block associated with LP to determine if it's an empty
4374 handler for its EH region. If so, attempt to redirect EH edges to
4375 an outer region. Return true the CFG was updated in any way. This
4376 is similar to jump forwarding, just across EH edges. */
4379 cleanup_empty_eh (eh_landing_pad lp
)
4381 basic_block bb
= label_to_block (lp
->post_landing_pad
);
4382 gimple_stmt_iterator gsi
;
4384 eh_region new_region
;
4387 bool has_non_eh_pred
;
4391 /* There can be zero or one edges out of BB. This is the quickest test. */
4392 switch (EDGE_COUNT (bb
->succs
))
4398 e_out
= single_succ_edge (bb
);
4404 resx
= last_stmt (bb
);
4405 if (resx
&& is_gimple_resx (resx
))
4407 if (stmt_can_throw_external (resx
))
4408 optimize_clobbers (bb
);
4409 else if (sink_clobbers (bb
))
4413 gsi
= gsi_after_labels (bb
);
4415 /* Make sure to skip debug statements. */
4416 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
4417 gsi_next_nondebug (&gsi
);
4419 /* If the block is totally empty, look for more unsplitting cases. */
4420 if (gsi_end_p (gsi
))
4422 /* For the degenerate case of an infinite loop bail out.
4423 If bb has no successors and is totally empty, which can happen e.g.
4424 because of incorrect noreturn attribute, bail out too. */
4426 || infinite_empty_loop_p (e_out
))
4429 return ret
| cleanup_empty_eh_unsplit (bb
, e_out
, lp
);
4432 /* The block should consist only of a single RESX statement, modulo a
4433 preceding call to __builtin_stack_restore if there is no outgoing
4434 edge, since the call can be eliminated in this case. */
4435 resx
= gsi_stmt (gsi
);
4436 if (!e_out
&& gimple_call_builtin_p (resx
, BUILT_IN_STACK_RESTORE
))
4439 resx
= gsi_stmt (gsi
);
4441 if (!is_gimple_resx (resx
))
4443 gcc_assert (gsi_one_before_end_p (gsi
));
4445 /* Determine if there are non-EH edges, or resx edges into the handler. */
4446 has_non_eh_pred
= false;
4447 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4448 if (!(e
->flags
& EDGE_EH
))
4449 has_non_eh_pred
= true;
4451 /* Find the handler that's outer of the empty handler by looking at
4452 where the RESX instruction was vectored. */
4453 new_lp_nr
= lookup_stmt_eh_lp (resx
);
4454 new_region
= get_eh_region_from_lp_number (new_lp_nr
);
4456 /* If there's no destination region within the current function,
4457 redirection is trivial via removing the throwing statements from
4458 the EH region, removing the EH edges, and allowing the block
4459 to go unreachable. */
4460 if (new_region
== NULL
)
4462 gcc_assert (e_out
== NULL
);
4463 for (ei
= ei_start (bb
->preds
); (e
= ei_safe_edge (ei
)); )
4464 if (e
->flags
& EDGE_EH
)
4466 gimple stmt
= last_stmt (e
->src
);
4467 remove_stmt_from_eh_lp (stmt
);
4475 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4476 to handle the abort and allow the blocks to go unreachable. */
4477 if (new_region
->type
== ERT_MUST_NOT_THROW
)
4479 for (ei
= ei_start (bb
->preds
); (e
= ei_safe_edge (ei
)); )
4480 if (e
->flags
& EDGE_EH
)
4482 gimple stmt
= last_stmt (e
->src
);
4483 remove_stmt_from_eh_lp (stmt
);
4484 add_stmt_to_eh_lp (stmt
, new_lp_nr
);
4492 /* Try to redirect the EH edges and merge the PHIs into the destination
4493 landing pad block. If the merge succeeds, we'll already have redirected
4494 all the EH edges. The handler itself will go unreachable if there were
4496 if (cleanup_empty_eh_merge_phis (e_out
->dest
, bb
, e_out
, true))
4499 /* Finally, if all input edges are EH edges, then we can (potentially)
4500 reduce the number of transfers from the runtime by moving the landing
4501 pad from the original region to the new region. This is a win when
4502 we remove the last CLEANUP region along a particular exception
4503 propagation path. Since nothing changes except for the region with
4504 which the landing pad is associated, the PHI nodes do not need to be
4506 if (!has_non_eh_pred
)
4508 cleanup_empty_eh_move_lp (bb
, e_out
, lp
, new_region
);
4509 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4510 fprintf (dump_file
, "Empty EH handler %i moved to EH region %i.\n",
4511 lp
->index
, new_region
->index
);
4513 /* ??? The CFG didn't change, but we may have rendered the
4514 old EH region unreachable. Trigger a cleanup there. */
4521 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4522 fprintf (dump_file
, "Empty EH handler %i removed.\n", lp
->index
);
4523 remove_eh_landing_pad (lp
);
4527 /* Do a post-order traversal of the EH region tree. Examine each
4528 post_landing_pad block and see if we can eliminate it as empty. */
4531 cleanup_all_empty_eh (void)
4533 bool changed
= false;
4537 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
4539 changed
|= cleanup_empty_eh (lp
);
/* Perform cleanups and lowering of exception handling:
    1) regions whose handlers do nothing are optimized out;
    2) MUST_NOT_THROW regions that became dead because of 1) are optimized out;
    3) information about which regions contain instructions, and which
       regions are reachable via local EH edges, is collected;
    4) the EH tree is pruned of regions that are no longer necessary.

   TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
	 Unify those that have the same failure decl and locus.  */
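/* Illustration only, not part of the pass: a cleanup region whose handler
   does nothing typically arises once an empty destructor has been inlined,
   e.g. for C++ input roughly of the form

	struct S { ~S () { } };
	extern void might_throw ();
	void f () { S s; might_throw (); }

   where might_throw stands in for any call that may throw.  After inlining
   ~S, the landing pad for the call contains nothing but a RESX, which is
   the shape step 1 above removes via cleanup_empty_eh; a MUST_NOT_THROW
   region kept alive only by that handler then becomes dead and is removed
   in step 2.  */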
static unsigned int
execute_cleanup_eh_1 (void)
{
  /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
     looking up unreachable landing pads.  */
  remove_unreachable_handlers ();

  /* Watch out for the region tree vanishing due to all unreachable.  */
  if (cfun->eh->region_tree)
    {
      bool changed = false;

      if (optimize)
	changed |= unsplit_all_eh ();
      changed |= cleanup_all_empty_eh ();

      if (changed)
	{
	  free_dominance_info (CDI_DOMINATORS);
	  free_dominance_info (CDI_POST_DOMINATORS);

	  /* We delayed all basic block deletion, as we may have performed
	     cleanups on EH edges while non-EH edges were still present.  */
	  delete_unreachable_blocks ();

	  /* We manipulated the landing pads.  Remove any region that no
	     longer has a landing pad.  */
	  remove_unreachable_handlers_no_lp ();

	  return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
	}
    }

  return 0;
}
namespace {

const pass_data pass_data_cleanup_eh =
{
  GIMPLE_PASS, /* type */
  "ehcleanup", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_cleanup_eh : public gimple_opt_pass
{
public:
  pass_cleanup_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }
  virtual bool gate (function *fun)
    {
      return fun->eh != NULL && fun->eh->region_tree != NULL;
    }

  virtual unsigned int execute (function *);

}; // class pass_cleanup_eh

unsigned int
pass_cleanup_eh::execute (function *fun)
{
  int ret = execute_cleanup_eh_1 ();

  /* If the function no longer needs an EH personality routine,
     clear it.  This exposes cross-language inlining opportunities
     and avoids references to a never defined personality routine.  */
  if (DECL_FUNCTION_PERSONALITY (current_function_decl)
      && function_needs_eh_personality (fun) != eh_personality_lang)
    DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;

  return ret;
}

} // anon namespace

/* Create an instance of the EH cleanup pass for the pass manager.  */

gimple_opt_pass *
make_pass_cleanup_eh (gcc::context *ctxt)
{
  return new pass_cleanup_eh (ctxt);
}
/* Verify that BB, which contains STMT as its last statement, has precisely
   the edges that make_eh_edges would create.  */

DEBUG_FUNCTION bool
verify_eh_edges (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  eh_landing_pad lp = NULL;
  int lp_nr;
  edge_iterator ei;
  edge e, eh_edge;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr > 0)
    lp = get_eh_landing_pad_from_number (lp_nr);

  eh_edge = NULL;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->flags & EDGE_EH)
	{
	  if (eh_edge)
	    {
	      error ("BB %i has multiple EH edges", bb->index);
	      return true;
	    }
	  eh_edge = e;
	}
    }

  if (lp == NULL)
    {
      if (eh_edge)
	{
	  error ("BB %i can not throw but has an EH edge", bb->index);
	  return true;
	}
      return false;
    }

  if (!stmt_could_throw_p (stmt))
    {
      error ("BB %i last statement has incorrectly set lp", bb->index);
      return true;
    }

  if (eh_edge == NULL)
    {
      error ("BB %i is missing an EH edge", bb->index);
      return true;
    }

  if (eh_edge->dest != label_to_block (lp->post_landing_pad))
    {
      error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
      return true;
    }

  return false;
}
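/* Illustration only: the checker above enforces that a statement recorded
   against a landing pad has exactly one EDGE_EH successor, and that it
   targets that landing pad's post_landing_pad block, while a statement
   with no landing pad has none.  Passes that rewrite a potentially
   throwing statement are expected to keep this in sync, typically with a
   pattern along the lines of

	if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
	  gimple_purge_dead_eh_edges (gimple_bb (new_stmt));

   where old_stmt and new_stmt stand for the statement before and after
   the rewrite.  */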
/* Similarly, but handle GIMPLE_EH_DISPATCH specifically.  */

DEBUG_FUNCTION bool
verify_eh_dispatch_edge (geh_dispatch *stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;
  bool want_fallthru = true;
  edge_iterator ei;
  edge e, fall_edge;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  FOR_EACH_EDGE (e, ei, src->succs)
    gcc_assert (e->aux == NULL);

  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  dst = label_to_block (c->label);
	  e = find_edge (src, dst);
	  if (e == NULL)
	    {
	      error ("BB %i is missing an edge", src->index);
	      return true;
	    }
	  e->aux = (void *) e;

	  /* A catch-all handler doesn't have a fallthru.  */
	  if (c->type_list == NULL)
	    {
	      want_fallthru = false;
	      break;
	    }
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (r->u.allowed.label);
      e = find_edge (src, dst);
      if (e == NULL)
	{
	  error ("BB %i is missing an edge", src->index);
	  return true;
	}
      e->aux = (void *) e;
      break;

    default:
      gcc_unreachable ();
    }

  fall_edge = NULL;
  FOR_EACH_EDGE (e, ei, src->succs)
    {
      if (e->flags & EDGE_FALLTHRU)
	{
	  if (fall_edge != NULL)
	    {
	      error ("BB %i too many fallthru edges", src->index);
	      return true;
	    }
	  fall_edge = e;
	}
      else if (e->aux)
	e->aux = NULL;
      else
	{
	  error ("BB %i has incorrect edge", src->index);
	  return true;
	}
    }

  if ((fall_edge != NULL) ^ want_fallthru)
    {
      error ("BB %i has incorrect fallthru edge", src->index);
      return true;
    }

  return false;
}
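/* Illustration only: the invariant checked above is that a GIMPLE_EH_DISPATCH
   for an ERT_TRY region has one outgoing edge per catch handler, up to and
   including a catch-all, plus a fallthru edge only when no catch-all ends
   the handler list.  For C++ input roughly of the form

	void g ()
	{
	  try { might_throw (); }
	  catch (int) { }
	  catch (...) { }
	}

   (might_throw again standing in for any throwing call) the dispatch block
   gets one edge to each handler and no fallthru, because the catch-all
   absorbs every exception the dispatch sees.  */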