/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "except.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "target.h"
#include "cfgloop.h"
#include "gimple-low.h"
/* In some instances a tree and a gimple need to be stored in the same
   table, i.e. in hash tables.  This is a structure to do this.  */

typedef union {tree *tp; tree t; gimple g;} treemple;
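/* For example, record_in_goto_queue_label below stores the address of a
   GIMPLE_COND's label operand through the "tp" member, a plain
   GIMPLE_GOTO or GIMPLE_RETURN through "g", and a LABEL_DECL through
   "t"; the same queue and tables can thus hold all three.  */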
/* Misc functions used in this file.  */
/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
/* Add statement T in function IFUN to landing pad NUM.  */

static void
add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
{
  gcc_assert (num != 0);

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, hash_map<gimple, int>::create_ggc (31));

  gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num));
}
/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

void
add_stmt_to_eh_lp (gimple t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}
/* Add statement T to the single EH landing pad in REGION.  */

static void
record_stmt_eh_region (eh_region region, gimple t)
{
  if (region == NULL)
    return;
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
        lp = gen_eh_landing_pad (region);
      else
        gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}
/* Remove statement T in function IFUN from its EH landing pad.  */

bool
remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
{
  if (!get_eh_throw_stmt_table (ifun))
    return false;

  if (!get_eh_throw_stmt_table (ifun)->get (t))
    return false;

  get_eh_throw_stmt_table (ifun)->remove (t);
  return true;
}
/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}
/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */

int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
{
  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  int *lp_nr = ifun->eh->throw_stmt_table->get (t);
  return lp_nr ? *lp_nr : 0;
}
/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}
/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gtry *parent;
};
/* Hashtable helpers.  */

struct finally_tree_hasher : typed_free_remove <finally_tree_node>
{
  typedef finally_tree_node *value_type;
  typedef finally_tree_node *compare_type;
  static inline hashval_t hash (const finally_tree_node *);
  static inline bool equal (const finally_tree_node *,
                            const finally_tree_node *);
};
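/* Hash on the pointer recorded in the child field.  The low bits of an
   aligned pointer are mostly zero, so shifting them out should spread
   entries across buckets somewhat better.  */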
inline hashval_t
finally_tree_hasher::hash (const finally_tree_node *v)
{
  return (intptr_t)v->child.t >> 4;
}
inline bool
finally_tree_hasher::equal (const finally_tree_node *v,
                            const finally_tree_node *c)
{
  return v->child.t == c->child.t;
}
/* Note that this table is *not* marked GTY.  It is short-lived.  */
static hash_table<finally_tree_hasher> *finally_tree;
static void
record_in_finally_tree (treemple child, gtry *parent)
{
  struct finally_tree_node *n;
  finally_tree_node **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = finally_tree->find_slot (n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
static void
collect_finally_tree (gimple stmt, gtry *region);
/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gtry *region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}
static void
collect_finally_tree (gimple stmt, gtry *region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (as_a <glabel *> (stmt));
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        {
          temp.g = stmt;
          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt),
                                  as_a <gtry *> (stmt));
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        {
          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (
                                 as_a <gcatch *> (stmt)),
                              region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
        collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = finally_tree->find (&n);
      if (!p)
        return true;
      start.g = p->parent;
    }
  while (start.g != target);

  return false;
}
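/* For instance, given

       try { ... goto L; ... } finally { F; }
     L:;

   L was recorded outside the GIMPLE_TRY_FINALLY, so the parent chain
   walked above never reaches that node and the function returns true:
   the goto escapes.  A goto to a label recorded inside the same node
   climbs the parent chain to TARGET and returns false.  */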
/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straight-forward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;
/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;
/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */
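/* For a "goto L;" that escapes a try_finally, REPL_STMT holds the code
   that first routes control to the finally block (e.g. for the switch
   lowering below, an assignment to the finally temporary plus a branch
   to the finally label), while CONT_STMT is the rebuilt "goto L;" that
   resumes at the original destination afterwards.  */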
struct goto_queue_node
{
  treemple stmt;
  location_t location;
  gimple_seq repl_stmt;
  gimple cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  int is_label;
};
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gtry *try_finally_expr;
  gtry *top_p;

  /* While lowering a top_p, it is usually expanded into multiple statements;
     we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  hash_map<gimple, goto_queue_node *> *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  vec<tree> dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *);
/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20
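/* Below this threshold find_goto_replacement scans the queue linearly;
   past it, a hash map from statement to queue node is built lazily on
   the first lookup and used instead.  */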
static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);
static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = new hash_map<gimple, goto_queue_node *>;
      for (i = 0; i < tf->goto_queue_active; i++)
        {
          bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g,
                                                  &tf->goto_queue[i]);
          gcc_assert (!existed);
        }
    }

  goto_queue_node **slot = tf->goto_queue_map->get (stmt.g);
  if (slot != NULL)
    return ((*slot)->repl_stmt);

  return NULL;
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
        {
          gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
          gsi_remove (gsi, false);
          return;
        }
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
                                      as_a <gcatch *> (stmt)),
                                    tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
      break;
    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
                                      tf);
        replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
                                      tf);
      }
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}
/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (*seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}
/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (&eh_seq, tf);
}
/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple return.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
                      treemple new_stmt,
                      int index,
                      bool is_label,
                      location_t location)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->location = location;
  q->is_label = is_label;
}
/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
                            location_t location)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array.exists ())
    {
      tf->dest_array.create (10);
      tf->dest_array.quick_push (label);
      index = 0;
    }
  else
    {
      int n = tf->dest_array.length ();
      for (index = 0; index < n; ++index)
        if (tf->dest_array[index] == label)
          break;
      if (index == n)
        tf->dest_array.safe_push (label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have an easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true, location);
}
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        gcond *cond_stmt = as_a <gcond *> (stmt);
        new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
        record_in_goto_queue_label (tf, new_stmt,
                                    gimple_cond_true_label (cond_stmt),
                                    EXPR_LOCATION (*new_stmt.tp));
        new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
        record_in_goto_queue_label (tf, new_stmt,
                                    gimple_cond_false_label (cond_stmt),
                                    EXPR_LOCATION (*new_stmt.tp));
      }
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
                                  gimple_location (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
      break;

    default:
      gcc_unreachable ();
    }
}
#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state,
                             gswitch *switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

       int x;
       int foo (void)
       {
         x = 0;
         try {
           return x;
         } finally {
           x++;
         }
       }

     should return 0, not 1.  We don't have to do anything to make
     this happen because the return value has been placed in the
     RESULT_DECL already.  */

  q->cont_stmt = q->stmt.g;

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)
{
  ggoto *x;

  gcc_assert (q->is_label);

  q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Emit a standard landing pad sequence into SEQ for REGION.  */

static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
  eh_landing_pad lp = region->landing_pads;
  glabel *x;

  if (lp == NULL)
    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
}
/* Emit a RESX statement into SEQ for REGION.  */

static void
emit_resx (gimple_seq *seq, eh_region region)
{
  gresx *x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  if (region->outer)
    record_stmt_eh_region (region->outer, x);
}
/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
  geh_dispatch *x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);
}
/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      if (region->type == ERT_MUST_NOT_THROW)
        break;
      region = region->outer;
      if (region == NULL)
        break;
    }
}
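/* E.g. a throwing statement inside two nested cleanup regions marks both
   of them plus any outer regions, stopping early at a MUST_NOT_THROW
   region, at the root, or as soon as a region is found already marked
   (everything outward from there is marked as well).  */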
/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

static inline bool
eh_region_may_contain_throw (eh_region r)
{
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
}
/* We want to transform
        try { body; } catch { stuff; }
   into
        normal_sequence:
          body;
          over:
        eh_sequence:
          landing_pad:
            stuff;
            goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gtry *tp, eh_region region, tree over)
{
  gimple x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
        over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_set_location (x, loc);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
                             location_t loc)
{
  gtry *region = NULL;
  gimple_seq new_seq;
  gimple_stmt_iterator gsi;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
        {
          tree block = gimple_block (stmt);
          gimple_set_location (stmt, loc);
          gimple_set_block (stmt, block);
        }
    }

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
        {
          temp.t = label;
          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
        }
    }
  return label;
}
/* A subroutine of lower_try_finally.  If FINALLY consists of a
   GIMPLE_EH_ELSE node, return it.  */

static inline geh_else *
get_eh_else (gimple_seq finally)
{
  gimple x = gimple_seq_first_stmt (finally);
  if (gimple_code (x) == GIMPLE_EH_ELSE)
    {
      gcc_assert (gimple_seq_singleton_p (finally));
      return as_a <geh_else *> (x);
    }
  return NULL;
}
/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

        try {
          finally_block;
        } catch {
          if (fintmp == eh_edge)
            protect_cleanup_actions;
        }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x;
  geh_mnt *eh_mnt;
  gtry *try_stmt;
  geh_else *eh_else;

  /* First check for nothing to do.  */
  if (lang_hooks.eh_protect_cleanup_actions == NULL)
    return;
  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)
    return;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  If we've got an EH_ELSE, extract it now.  */
  if (eh_else)
    {
      finally = gimple_eh_else_e_body (eh_else);
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
    }
  else if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state,
        gimple_location (tf->try_finally_expr));
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  eh_mnt = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  try_stmt = gimple_build_try (finally, gimple_seq_alloc_with_stmt (eh_mnt),
                               GIMPLE_TRY_CATCH);
  finally = lower_eh_must_not_throw (outer_state, try_stmt);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)
{
  tree lab;
  gimple x;
  geh_else *eh_else;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
        {
          finally = gimple_eh_else_e_body (eh_else);
          lower_eh_constructs_1 (state, &finally);

          emit_post_landing_pad (&eh_seq, tf->region);
          gimple_seq_add_seq (&eh_seq, finally);
        }
    }
  else
    {
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
        {
          emit_post_landing_pad (&eh_seq, tf->region);

          x = gimple_build_goto (lab);
          gimple_set_location (x, gimple_location (tf->try_finally_expr));
          gimple_seq_add_stmt (&eh_seq, x);
        }
    }
}
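/* For example, a finally block that ends in a noreturn call cannot fall
   through:

       try { ... goto out; ... } finally { cleanup (); abort (); }

   Here each escaping goto/return is redirected to the label at the head
   of the single lowered copy of the finally block, and if the region may
   throw the landing pad branches there too (in the non-EH_ELSE case).  */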
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  geh_else *eh_else;
  glabel *label_stmt;
  gimple x;
  gimple_seq finally;
  gimple_stmt_iterator gsi;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      if (tf->may_throw)
        finally = gimple_eh_else_e_body (eh_else);
      else
        finally = gimple_eh_else_n_body (eh_else);
    }

  lower_eh_constructs_1 (state, &finally);

  for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
        {
          tree block = gimple_block (stmt);
          gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
          gimple_set_block (stmt, block);
        }
    }

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  label_stmt = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (tf->dest_array[0] == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple x;
  geh_else *eh_else;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);

  /* Notice EH_ELSE, and simplify some of the remaining code
     by considering FINALLY to be the normal return path only.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    finally = gimple_eh_else_n_body (eh_else);

  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      /* We don't need to copy the EH path of EH_ELSE,
         since it is only emitted once.  */
      if (eh_else)
        seq = gimple_eh_else_e_body (eh_else);
      else
        seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = tf->dest_array.length ();
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label
            = create_artificial_label (tf_loc);

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
          else
            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state, q->location);
          lower_eh_constructs_1 (state, &seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
          else
            do_goto_redirection (q, lab, NULL, tf);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}
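/* For instance, with two distinct escaping gotos plus a fallthru, three
   copies of the finally block are emitted, each followed by a direct
   branch to its own destination; no dispatch variable or switch is
   needed, at the cost of the duplicated code.  */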
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  vec<tree> case_label_vec;
  gimple_seq switch_body = NULL;
  gimple x;
  geh_else *eh_else;
  tree tmp;
  gimple switch_stmt;
  gimple_seq finally;
  hash_map<tree, gimple> *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Mash the TRY block to the head of the chain.  */
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  x = gimple_seq_last_stmt (finally);
  finally_loc = x ? gimple_location (x) : tf_loc;

  /* Prepare for switch statement generation.  */
  nlabels = tf->dest_array.length ();
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + (tf->may_throw && !eh_else);
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use vec::quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec.create (ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node,
                                              fallthru_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&switch_body, x);
    }

  /* For EH_ELSE, emit the exception path (plus resx) now, then
     subsequently we only need consider the normal path.  */
  if (eh_else)
    {
      if (tf->may_throw)
        {
          finally = gimple_eh_else_e_body (eh_else);
          lower_eh_constructs_1 (state, &finally);

          emit_post_landing_pad (&eh_seq, tf->region);
          gimple_seq_add_seq (&eh_seq, finally);
          emit_resx (&eh_seq, tf->region);
        }

      finally = gimple_eh_else_n_body (eh_else);
    }
  else if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  lower_eh_constructs_1 (state, &finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod = NULL;
      int switch_id;
      unsigned int case_index;

      if (q->index < 0)
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node,
                                                  return_index));
          gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod);
          switch_id = return_index;
        }
      else
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node, q->index));
          gimple_seq_add_stmt (&mod, x);
          do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
        {
          tree case_lab;
          tmp = build_int_cst (integer_type_node, switch_id);
          case_lab = build_case_label (tmp, NULL,
                                       create_artificial_label (tf_loc));
          /* We store the cont_stmt in the pointer map, so that we can recover
             it in the loop below.  */
          if (!cont_map)
            cont_map = new hash_map<tree, gimple>;
          cont_map->put (case_lab, q->cont_stmt);
          case_label_vec.quick_push (case_lab);
        }
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      gimple cont_stmt;

      last_case = case_label_vec[j];

      gcc_assert (last_case);
      gcc_assert (cont_map);

      cont_stmt = *cont_map->get (last_case);

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    delete cont_map;

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  tree tem = case_label_vec.pop ();
  gcc_assert (tem == last_case);
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch (finally_tmp, last_case,
                                     case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}
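/* The generated shape is roughly

       finally_tmp = <index of incoming edge>;
       goto finally_label;
       ...
     finally_label:
       <finally block>
       switch (finally_tmp)
         {
         case fallthru_index: goto fallthru_label;
         case <dest index>: goto <original destination>;
         ...
         }

   with the EH entry (when not handled by EH_ELSE) assigning eh_index
   and its case ending in a RESX.  */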
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
{
  int f_estimate, sw_estimate;
  geh_else *eh_else;

  /* If there's an EH_ELSE involved, the exception path is separate
     and really doesn't come into play for this computation.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      ndests -= may_throw;
      finally = gimple_eh_else_n_body (eh_else);
    }

  if (!optimize)
    {
      gimple_stmt_iterator gsi;

      if (ndests == 1)
        return true;

      for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
            return false;
        }
      return true;
    }

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
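/* A worked example of the sizes above (hypothetical numbers): a finally
   block estimated at 12 insns with 3 destinations costs (12 + 1) * 3 = 39
   to copy versus 10 + 2 * 3 = 16 for the switch form, so -Os would take
   the switch; at -O2 the copy still wins since 39 < 100.  */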
/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */

static bool
cleanup_is_dead_in (eh_region reg)
{
  while (reg && reg->type == ERT_CLEANUP)
    reg = reg->outer;
  return (reg && reg->type == ERT_MUST_NOT_THROW);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static gimple_seq
lower_try_finally (struct leh_state *state, gtry *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
    {
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
    }
  else
    {
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;
    }

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  old_eh_seq = eh_seq;
  eh_seq = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (this_tf.region)
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = this_tf.dest_array.length ();
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, this_tf.may_throw,
                                    gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  this_tf.dest_array.release ();
  free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    delete this_tf.goto_queue_map;

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */
  if (old_eh_seq)
    {
      if (eh_seq == NULL)
        eh_seq = old_eh_seq;
      else
        {
          gimple_seq new_eh_seq = eh_seq;
          eh_seq = old_eh_seq;
          gimple_seq_add_seq (&eh_seq, new_eh_seq);
        }
    }

  return this_tf.top_p_seq;
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static gimple_seq
lower_catch (struct leh_state *state, gtry *tp)
{
  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;
  tree out_label;
  gimple_seq new_seq, cleanup;
  gimple x;
  location_t try_catch_loc = gimple_location (tp);

  if (flag_exceptions)
    {
      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  emit_eh_dispatch (&new_seq, try_region);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = try_region;

  /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
     itself, so that e.g. for coverage purposes the nested cleanups don't
     appear before the cleanup body.  See PR64634 for details.  */
  gimple_seq old_eh_seq = eh_seq;
  eh_seq = NULL;

  out_label = NULL;
  cleanup = gimple_try_cleanup (tp);
  for (gsi = gsi_start (cleanup);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      eh_catch c;
      gcatch *catch_stmt;
      gimple_seq handler;

      catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
      c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));

      handler = gimple_catch_handler (catch_stmt);
      lower_eh_constructs_1 (&this_state, &handler);

      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      if (gimple_seq_may_fallthru (new_seq))
        {
          if (!out_label)
            out_label = create_artificial_label (try_catch_loc);

          x = gimple_build_goto (out_label);
          gimple_seq_add_stmt (&new_seq, x);
        }
      if (!c->type_list)
        break;
    }

  gimple_try_set_cleanup (tp, new_seq);

  gimple_seq new_eh_seq = eh_seq;
  eh_seq = old_eh_seq;
  gimple_seq ret_seq = frob_into_branch_around (tp, try_region, out_label);
  gimple_seq_add_seq (&eh_seq, new_eh_seq);
  return ret_seq;
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static gimple_seq
lower_eh_filter (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  gimple inner, x;
  gimple_seq new_seq;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)
    {
      this_region = gen_eh_region_allowed (state->cur_region,
                                           gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  emit_eh_dispatch (&new_seq, this_region);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;

  if (flag_exceptions)
    {
      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
        = gimple_eh_must_not_throw_fndecl (
            as_a <geh_mnt *> (inner));
      this_region->u.must_not_throw.failure_loc
        = LOCATION_LOCUS (gimple_location (tp));

      /* In order to get mangling applied to this decl, we must mark it
         used now.  Otherwise, pass_ipa_free_lang_data won't think it
         needs to happen.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  return gimple_try_eval (tp);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static gimple_seq
lower_cleanup (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;
  gimple_seq result;
  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);

  if (flag_exceptions && !cleanup_dead)
    {
      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
      result = frob_into_branch_around (tp, this_region,
                                        fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
        {
          gimple x = gimple_build_label (fake_tf.fallthru_label);
          gimple_seq_add_stmt (&result, x);
        }
    }
  return result;
}
1991 /* Main loop for lowering eh constructs. Also moves gsi to the next
1992 statement. */
1994 static void
1995 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1997 gimple_seq replace;
1998 gimple x;
1999 gimple stmt = gsi_stmt (*gsi);
2001 switch (gimple_code (stmt))
2003 case GIMPLE_CALL:
2005 tree fndecl = gimple_call_fndecl (stmt);
2006 tree rhs, lhs;
2008 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2009 switch (DECL_FUNCTION_CODE (fndecl))
2011 case BUILT_IN_EH_POINTER:
2012 /* The front end may have generated a call to
2013 __builtin_eh_pointer (0) within a catch region. Replace
2014 this zero argument with the current catch region number. */
2015 if (state->ehp_region)
2017 tree nr = build_int_cst (integer_type_node,
2018 state->ehp_region->index);
2019 gimple_call_set_arg (stmt, 0, nr);
2021 else
2023 /* The user has done something silly. Remove it. */
2024 rhs = null_pointer_node;
2025 goto do_replace;
2027 break;
2029 case BUILT_IN_EH_FILTER:
2030 /* ??? This should never appear, but since it's a builtin it
2031 is accessible to abuse by users. Just remove it and
2032 replace the use with the arbitrary value zero. */
2033 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
2034 do_replace:
2035 lhs = gimple_call_lhs (stmt);
2036 x = gimple_build_assign (lhs, rhs);
2037 gsi_insert_before (gsi, x, GSI_SAME_STMT);
2038 /* FALLTHRU */
2040 case BUILT_IN_EH_COPY_VALUES:
2041 /* Likewise this should not appear. Remove it. */
2042 gsi_remove (gsi, true);
2043 return;
2045 default:
2046 break;
2049 /* FALLTHRU */
2051 case GIMPLE_ASSIGN:
2052 /* If the stmt can throw use a new temporary for the assignment
2053 to a LHS. This makes sure the old value of the LHS is
2054 available on the EH edge. Only do so for statements that
2055 potentially fall through (no noreturn calls e.g.), otherwise
2056 this new assignment might create fake fallthru regions. */
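/* Illustrative sketch (hypothetical names, not actual dump output):
   a throwing "lhs = call ();" becomes

     tmp = call ();
     lhs = tmp;

   so LHS still holds its old value on the EH edge out of the call. */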
2057 if (stmt_could_throw_p (stmt)
2058 && gimple_has_lhs (stmt)
2059 && gimple_stmt_may_fallthru (stmt)
2060 && !tree_could_throw_p (gimple_get_lhs (stmt))
2061 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
2063 tree lhs = gimple_get_lhs (stmt);
2064 tree tmp = create_tmp_var (TREE_TYPE (lhs));
2065 gimple s = gimple_build_assign (lhs, tmp);
2066 gimple_set_location (s, gimple_location (stmt));
2067 gimple_set_block (s, gimple_block (stmt));
2068 gimple_set_lhs (stmt, tmp);
2069 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
2070 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
2071 DECL_GIMPLE_REG_P (tmp) = 1;
2072 gsi_insert_after (gsi, s, GSI_SAME_STMT);
2074 /* Look for things that can throw exceptions, and record them. */
2075 if (state->cur_region && stmt_could_throw_p (stmt))
2077 record_stmt_eh_region (state->cur_region, stmt);
2078 note_eh_region_may_contain_throw (state->cur_region);
2080 break;
2082 case GIMPLE_COND:
2083 case GIMPLE_GOTO:
2084 case GIMPLE_RETURN:
2085 maybe_record_in_goto_queue (state, stmt);
2086 break;
2088 case GIMPLE_SWITCH:
2089 verify_norecord_switch_expr (state, as_a <gswitch *> (stmt));
2090 break;
2092 case GIMPLE_TRY:
2094 gtry *try_stmt = as_a <gtry *> (stmt);
2095 if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2096 replace = lower_try_finally (state, try_stmt);
2097 else
2099 x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt));
2100 if (!x)
2102 replace = gimple_try_eval (try_stmt);
2103 lower_eh_constructs_1 (state, &replace);
2105 else
2106 switch (gimple_code (x))
2108 case GIMPLE_CATCH:
2109 replace = lower_catch (state, try_stmt);
2110 break;
2111 case GIMPLE_EH_FILTER:
2112 replace = lower_eh_filter (state, try_stmt);
2113 break;
2114 case GIMPLE_EH_MUST_NOT_THROW:
2115 replace = lower_eh_must_not_throw (state, try_stmt);
2116 break;
2117 case GIMPLE_EH_ELSE:
2118 /* This code is only valid with GIMPLE_TRY_FINALLY. */
2119 gcc_unreachable ();
2120 default:
2121 replace = lower_cleanup (state, try_stmt);
2122 break;
2127 /* Remove the old stmt and insert the transformed sequence
2128 instead. */
2129 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
2130 gsi_remove (gsi, true);
2132 /* Return since we don't want gsi_next () */
2133 return;
2135 case GIMPLE_EH_ELSE:
2136 /* We should be eliminating this in lower_try_finally et al. */
2137 gcc_unreachable ();
2139 default:
2140 /* A type, a decl, or some kind of statement that we're not
2141 interested in. Don't walk them. */
2142 break;
2145 gsi_next (gsi);
2148 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
2150 static void
2151 lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
2153 gimple_stmt_iterator gsi;
2154 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
2155 lower_eh_constructs_2 (state, &gsi);
2158 namespace {
2160 const pass_data pass_data_lower_eh =
2162 GIMPLE_PASS, /* type */
2163 "eh", /* name */
2164 OPTGROUP_NONE, /* optinfo_flags */
2165 TV_TREE_EH, /* tv_id */
2166 PROP_gimple_lcf, /* properties_required */
2167 PROP_gimple_leh, /* properties_provided */
2168 0, /* properties_destroyed */
2169 0, /* todo_flags_start */
2170 0, /* todo_flags_finish */
2173 class pass_lower_eh : public gimple_opt_pass
2175 public:
2176 pass_lower_eh (gcc::context *ctxt)
2177 : gimple_opt_pass (pass_data_lower_eh, ctxt)
2180 /* opt_pass methods: */
2181 virtual unsigned int execute (function *);
2183 }; // class pass_lower_eh
2185 unsigned int
2186 pass_lower_eh::execute (function *fun)
2188 struct leh_state null_state;
2189 gimple_seq bodyp;
2191 bodyp = gimple_body (current_function_decl);
2192 if (bodyp == NULL)
2193 return 0;
2195 finally_tree = new hash_table<finally_tree_hasher> (31);
2196 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
2197 memset (&null_state, 0, sizeof (null_state));
2199 collect_finally_tree_1 (bodyp, NULL);
2200 lower_eh_constructs_1 (&null_state, &bodyp);
2201 gimple_set_body (current_function_decl, bodyp);
2203 /* We assume there's a return statement, or something, at the end of
2204 the function, and thus plopping the EH sequence afterward won't
2205 change anything. */
2206 gcc_assert (!gimple_seq_may_fallthru (bodyp));
2207 gimple_seq_add_seq (&bodyp, eh_seq);
2209 /* We assume that since BODYP already existed, adding EH_SEQ to it
2210 didn't change its value, and we don't have to re-set the function. */
2211 gcc_assert (bodyp == gimple_body (current_function_decl));
2213 delete finally_tree;
2214 finally_tree = NULL;
2215 BITMAP_FREE (eh_region_may_contain_throw_map);
2216 eh_seq = NULL;
2218 /* If this function needs a language specific EH personality routine
2219 and the frontend didn't already set one, do so now. */
2220 if (function_needs_eh_personality (fun) == eh_personality_lang
2221 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2222 DECL_FUNCTION_PERSONALITY (current_function_decl)
2223 = lang_hooks.eh_personality ();
2225 return 0;
2228 } // anon namespace
2230 gimple_opt_pass *
2231 make_pass_lower_eh (gcc::context *ctxt)
2233 return new pass_lower_eh (ctxt);
2236 /* Create the multiple edges from an EH_DISPATCH statement to all of
2237 the possible handlers for its EH region. Return true if there's
2238 no fallthru edge; false if there is. */
2240 bool
2241 make_eh_dispatch_edges (geh_dispatch *stmt)
2243 eh_region r;
2244 eh_catch c;
2245 basic_block src, dst;
2247 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2248 src = gimple_bb (stmt);
2250 switch (r->type)
2252 case ERT_TRY:
2253 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2255 dst = label_to_block (c->label);
2256 make_edge (src, dst, 0);
2258 /* A catch-all handler doesn't have a fallthru. */
2259 if (c->type_list == NULL)
2260 return false;
2262 break;
2264 case ERT_ALLOWED_EXCEPTIONS:
2265 dst = label_to_block (r->u.allowed.label);
2266 make_edge (src, dst, 0);
2267 break;
2269 default:
2270 gcc_unreachable ();
2273 return true;
2276 /* Create the single EH edge from STMT to its nearest landing pad,
2277 if there is such a landing pad within the current function. */
2279 void
2280 make_eh_edges (gimple stmt)
2282 basic_block src, dst;
2283 eh_landing_pad lp;
2284 int lp_nr;
2286 lp_nr = lookup_stmt_eh_lp (stmt);
2287 if (lp_nr <= 0)
2288 return;
2290 lp = get_eh_landing_pad_from_number (lp_nr);
2291 gcc_assert (lp != NULL);
2293 src = gimple_bb (stmt);
2294 dst = label_to_block (lp->post_landing_pad);
2295 make_edge (src, dst, EDGE_EH);
2298 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2299 do not actually perform the final edge redirection.
2301 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2302 we intend to change the destination EH region as well; this means
2303 EH_LANDING_PAD_NR must already be set on the destination block label.
2304 If false, we're being called from generic cfg manipulation code and we
2305 should preserve our place within the region tree. */
2307 static void
2308 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2310 eh_landing_pad old_lp, new_lp;
2311 basic_block old_bb;
2312 gimple throw_stmt;
2313 int old_lp_nr, new_lp_nr;
2314 tree old_label, new_label;
2315 edge_iterator ei;
2316 edge e;
2318 old_bb = edge_in->dest;
2319 old_label = gimple_block_label (old_bb);
2320 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2321 gcc_assert (old_lp_nr > 0);
2322 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2324 throw_stmt = last_stmt (edge_in->src);
2325 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2327 new_label = gimple_block_label (new_bb);
2329 /* Look for an existing region that might be using NEW_BB already. */
2330 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2331 if (new_lp_nr)
2333 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2334 gcc_assert (new_lp);
2336 /* Unless CHANGE_REGION is true, the new and old landing pad
2337 had better be associated with the same EH region. */
2338 gcc_assert (change_region || new_lp->region == old_lp->region);
2340 else
2342 new_lp = NULL;
2343 gcc_assert (!change_region);
2346 /* Notice when we redirect the last EH edge away from OLD_BB. */
2347 FOR_EACH_EDGE (e, ei, old_bb->preds)
2348 if (e != edge_in && (e->flags & EDGE_EH))
2349 break;
2351 if (new_lp)
2353 /* NEW_LP already exists. If there are still edges into OLD_LP,
2354 there's nothing to do with the EH tree. If there are no more
2355 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2356 If CHANGE_REGION is true, then our caller is expecting to remove
2357 the landing pad. */
2358 if (e == NULL && !change_region)
2359 remove_eh_landing_pad (old_lp);
2361 else
2363 /* No correct landing pad exists. If there are no more edges
2364 into OLD_LP, then we can simply re-use the existing landing pad.
2365 Otherwise, we have to create a new landing pad. */
2366 if (e == NULL)
2368 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2369 new_lp = old_lp;
2371 else
2372 new_lp = gen_eh_landing_pad (old_lp->region);
2373 new_lp->post_landing_pad = new_label;
2374 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2377 /* Maybe move the throwing statement to the new region. */
2378 if (old_lp != new_lp)
2380 remove_stmt_from_eh_lp (throw_stmt);
2381 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2385 /* Redirect EH edge E to NEW_BB. */
2387 edge
2388 redirect_eh_edge (edge edge_in, basic_block new_bb)
2390 redirect_eh_edge_1 (edge_in, new_bb, false);
2391 return ssa_redirect_edge (edge_in, new_bb);
2394 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2395 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2396 The actual edge update will happen in the caller. */
2398 void
2399 redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
2401 tree new_lab = gimple_block_label (new_bb);
2402 bool any_changed = false;
2403 basic_block old_bb;
2404 eh_region r;
2405 eh_catch c;
2407 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2408 switch (r->type)
2410 case ERT_TRY:
2411 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2413 old_bb = label_to_block (c->label);
2414 if (old_bb == e->dest)
2416 c->label = new_lab;
2417 any_changed = true;
2420 break;
2422 case ERT_ALLOWED_EXCEPTIONS:
2423 old_bb = label_to_block (r->u.allowed.label);
2424 gcc_assert (old_bb == e->dest);
2425 r->u.allowed.label = new_lab;
2426 any_changed = true;
2427 break;
2429 default:
2430 gcc_unreachable ();
2433 gcc_assert (any_changed);
2436 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2438 bool
2439 operation_could_trap_helper_p (enum tree_code op,
2440 bool fp_operation,
2441 bool honor_trapv,
2442 bool honor_nans,
2443 bool honor_snans,
2444 tree divisor,
2445 bool *handled)
2447 *handled = true;
2448 switch (op)
2450 case TRUNC_DIV_EXPR:
2451 case CEIL_DIV_EXPR:
2452 case FLOOR_DIV_EXPR:
2453 case ROUND_DIV_EXPR:
2454 case EXACT_DIV_EXPR:
2455 case CEIL_MOD_EXPR:
2456 case FLOOR_MOD_EXPR:
2457 case ROUND_MOD_EXPR:
2458 case TRUNC_MOD_EXPR:
2459 case RDIV_EXPR:
2460 if (honor_snans || honor_trapv)
2461 return true;
2462 if (fp_operation)
2463 return flag_trapping_math;
2464 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2465 return true;
2466 return false;
2468 case LT_EXPR:
2469 case LE_EXPR:
2470 case GT_EXPR:
2471 case GE_EXPR:
2472 case LTGT_EXPR:
2473 /* Some floating point comparisons may trap. */
2474 return honor_nans;
2476 case EQ_EXPR:
2477 case NE_EXPR:
2478 case UNORDERED_EXPR:
2479 case ORDERED_EXPR:
2480 case UNLT_EXPR:
2481 case UNLE_EXPR:
2482 case UNGT_EXPR:
2483 case UNGE_EXPR:
2484 case UNEQ_EXPR:
2485 return honor_snans;
2487 case NEGATE_EXPR:
2488 case ABS_EXPR:
2489 case CONJ_EXPR:
2490 /* These operations don't trap with floating point. */
2491 if (honor_trapv)
2492 return true;
2493 return false;
2495 case PLUS_EXPR:
2496 case MINUS_EXPR:
2497 case MULT_EXPR:
2498 /* Any floating arithmetic may trap. */
2499 if (fp_operation && flag_trapping_math)
2500 return true;
2501 if (honor_trapv)
2502 return true;
2503 return false;
2505 case COMPLEX_EXPR:
2506 case CONSTRUCTOR:
2507 /* Constructing an object cannot trap. */
2508 return false;
2510 default:
2511 /* Any floating arithmetic may trap. */
2512 if (fp_operation && flag_trapping_math)
2513 return true;
2515 *handled = false;
2516 return false;
2520 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2521 on floating-point values. HONOR_TRAPV is true if OP is applied on integer
2522 type operands that may trap. If OP is a division operator, DIVISOR contains
2523 the value of the divisor. */
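/* For example (a reading of the helper above, not an exhaustive
   contract): an integer TRUNC_DIV_EXPR may trap unless DIVISOR is a
   nonzero constant, and with flag_trapping_math any floating-point
   RDIV_EXPR may trap. */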
2525 bool
2526 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2527 tree divisor)
2529 bool honor_nans = (fp_operation && flag_trapping_math
2530 && !flag_finite_math_only);
2531 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2532 bool handled;
2534 if (TREE_CODE_CLASS (op) != tcc_comparison
2535 && TREE_CODE_CLASS (op) != tcc_unary
2536 && TREE_CODE_CLASS (op) != tcc_binary)
2537 return false;
2539 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2540 honor_nans, honor_snans, divisor,
2541 &handled);
2545 /* Returns true if it is possible to prove that the index of
2546 an array access REF (an ARRAY_REF expression) falls into the
2547 array bounds. */
2549 static bool
2550 in_array_bounds_p (tree ref)
2552 tree idx = TREE_OPERAND (ref, 1);
2553 tree min, max;
2555 if (TREE_CODE (idx) != INTEGER_CST)
2556 return false;
2558 min = array_ref_low_bound (ref);
2559 max = array_ref_up_bound (ref);
2560 if (!min
2561 || !max
2562 || TREE_CODE (min) != INTEGER_CST
2563 || TREE_CODE (max) != INTEGER_CST)
2564 return false;
2566 if (tree_int_cst_lt (idx, min)
2567 || tree_int_cst_lt (max, idx))
2568 return false;
2570 return true;
2573 /* Returns true if it is possible to prove that the range of
2574 an array access REF (an ARRAY_RANGE_REF expression) falls
2575 into the array bounds. */
2577 static bool
2578 range_in_array_bounds_p (tree ref)
2580 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
2581 tree range_min, range_max, min, max;
2583 range_min = TYPE_MIN_VALUE (domain_type);
2584 range_max = TYPE_MAX_VALUE (domain_type);
2585 if (!range_min
2586 || !range_max
2587 || TREE_CODE (range_min) != INTEGER_CST
2588 || TREE_CODE (range_max) != INTEGER_CST)
2589 return false;
2591 min = array_ref_low_bound (ref);
2592 max = array_ref_up_bound (ref);
2593 if (!min
2594 || !max
2595 || TREE_CODE (min) != INTEGER_CST
2596 || TREE_CODE (max) != INTEGER_CST)
2597 return false;
2599 if (tree_int_cst_lt (range_min, min)
2600 || tree_int_cst_lt (max, range_max))
2601 return false;
2603 return true;
2606 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2607 location or floating point arithmetic. Cf. the rtl version, may_trap_p.
2608 This routine expects only GIMPLE lhs or rhs input. */
2610 bool
2611 tree_could_trap_p (tree expr)
2613 enum tree_code code;
2614 bool fp_operation = false;
2615 bool honor_trapv = false;
2616 tree t, base, div = NULL_TREE;
2618 if (!expr)
2619 return false;
2621 code = TREE_CODE (expr);
2622 t = TREE_TYPE (expr);
2624 if (t)
2626 if (COMPARISON_CLASS_P (expr))
2627 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2628 else
2629 fp_operation = FLOAT_TYPE_P (t);
2630 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2633 if (TREE_CODE_CLASS (code) == tcc_binary)
2634 div = TREE_OPERAND (expr, 1);
2635 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2636 return true;
2638 restart:
2639 switch (code)
2641 case COMPONENT_REF:
2642 case REALPART_EXPR:
2643 case IMAGPART_EXPR:
2644 case BIT_FIELD_REF:
2645 case VIEW_CONVERT_EXPR:
2646 case WITH_SIZE_EXPR:
2647 expr = TREE_OPERAND (expr, 0);
2648 code = TREE_CODE (expr);
2649 goto restart;
2651 case ARRAY_RANGE_REF:
2652 base = TREE_OPERAND (expr, 0);
2653 if (tree_could_trap_p (base))
2654 return true;
2655 if (TREE_THIS_NOTRAP (expr))
2656 return false;
2657 return !range_in_array_bounds_p (expr);
2659 case ARRAY_REF:
2660 base = TREE_OPERAND (expr, 0);
2661 if (tree_could_trap_p (base))
2662 return true;
2663 if (TREE_THIS_NOTRAP (expr))
2664 return false;
2665 return !in_array_bounds_p (expr);
2667 case TARGET_MEM_REF:
2668 case MEM_REF:
2669 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
2670 && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
2671 return true;
2672 if (TREE_THIS_NOTRAP (expr))
2673 return false;
2674 /* We cannot prove that the access is in-bounds when we have
2675 variable-index TARGET_MEM_REFs. */
2676 if (code == TARGET_MEM_REF
2677 && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
2678 return true;
2679 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2681 tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2682 offset_int off = mem_ref_offset (expr);
2683 if (wi::neg_p (off, SIGNED))
2684 return true;
2685 if (TREE_CODE (base) == STRING_CST)
2686 return wi::leu_p (TREE_STRING_LENGTH (base), off);
2687 else if (DECL_SIZE_UNIT (base) == NULL_TREE
2688 || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST
2689 || wi::leu_p (wi::to_offset (DECL_SIZE_UNIT (base)), off))
2690 return true;
2691 /* Now we are sure the first byte of the access is inside
2692 the object. */
2693 return false;
2695 return true;
2697 case INDIRECT_REF:
2698 return !TREE_THIS_NOTRAP (expr);
2700 case ASM_EXPR:
2701 return TREE_THIS_VOLATILE (expr);
2703 case CALL_EXPR:
2704 t = get_callee_fndecl (expr);
2705 /* Assume that calls to weak functions may trap. */
2706 if (!t || !DECL_P (t))
2707 return true;
2708 if (DECL_WEAK (t))
2709 return tree_could_trap_p (t);
2710 return false;
2712 case FUNCTION_DECL:
2713 /* Assume that accesses to weak functions may trap, unless we know
2714 they are certainly defined in current TU or in some other
2715 LTO partition. */
2716 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
2718 cgraph_node *node = cgraph_node::get (expr);
2719 if (node)
2720 node = node->function_symbol ();
2721 return !(node && node->in_other_partition);
2723 return false;
2725 case VAR_DECL:
2726 /* Assume that accesses to weak vars may trap, unless we know
2727 they are certainly defined in current TU or in some other
2728 LTO partition. */
2729 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
2731 varpool_node *node = varpool_node::get (expr);
2732 if (node)
2733 node = node->ultimate_alias_target ();
2734 return !(node && node->in_other_partition);
2736 return false;
2738 default:
2739 return false;
2744 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2745 an assignment or a conditional) may throw. */
2747 static bool
2748 stmt_could_throw_1_p (gimple stmt)
2750 enum tree_code code = gimple_expr_code (stmt);
2751 bool honor_nans = false;
2752 bool honor_snans = false;
2753 bool fp_operation = false;
2754 bool honor_trapv = false;
2755 tree t;
2756 size_t i;
2757 bool handled, ret;
2759 if (TREE_CODE_CLASS (code) == tcc_comparison
2760 || TREE_CODE_CLASS (code) == tcc_unary
2761 || TREE_CODE_CLASS (code) == tcc_binary)
2763 if (is_gimple_assign (stmt)
2764 && TREE_CODE_CLASS (code) == tcc_comparison)
2765 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2766 else if (gimple_code (stmt) == GIMPLE_COND)
2767 t = TREE_TYPE (gimple_cond_lhs (stmt));
2768 else
2769 t = gimple_expr_type (stmt);
2770 fp_operation = FLOAT_TYPE_P (t);
2771 if (fp_operation)
2773 honor_nans = flag_trapping_math && !flag_finite_math_only;
2774 honor_snans = flag_signaling_nans != 0;
2776 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2777 honor_trapv = true;
2780 /* Check if the main expression may trap. */
2781 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2782 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2783 honor_nans, honor_snans, t,
2784 &handled);
2785 if (handled)
2786 return ret;
2788 /* If the expression does not trap, see if any of the individual operands may
2789 trap. */
2790 for (i = 0; i < gimple_num_ops (stmt); i++)
2791 if (tree_could_trap_p (gimple_op (stmt, i)))
2792 return true;
2794 return false;
2798 /* Return true if statement STMT could throw an exception. */
2800 bool
2801 stmt_could_throw_p (gimple stmt)
2803 if (!flag_exceptions)
2804 return false;
2806 /* The only statements that can throw an exception are assignments,
2807 conditionals, calls, resx, and asms. */
2808 switch (gimple_code (stmt))
2810 case GIMPLE_RESX:
2811 return true;
2813 case GIMPLE_CALL:
2814 return !gimple_call_nothrow_p (as_a <gcall *> (stmt));
2816 case GIMPLE_ASSIGN:
2817 case GIMPLE_COND:
2818 if (!cfun->can_throw_non_call_exceptions)
2819 return false;
2820 return stmt_could_throw_1_p (stmt);
2822 case GIMPLE_ASM:
2823 if (!cfun->can_throw_non_call_exceptions)
2824 return false;
2825 return gimple_asm_volatile_p (as_a <gasm *> (stmt));
2827 default:
2828 return false;
2833 /* Return true if expression T could throw an exception. */
2835 bool
2836 tree_could_throw_p (tree t)
2838 if (!flag_exceptions)
2839 return false;
2840 if (TREE_CODE (t) == MODIFY_EXPR)
2842 if (cfun->can_throw_non_call_exceptions
2843 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2844 return true;
2845 t = TREE_OPERAND (t, 1);
2848 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2849 t = TREE_OPERAND (t, 0);
2850 if (TREE_CODE (t) == CALL_EXPR)
2851 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2852 if (cfun->can_throw_non_call_exceptions)
2853 return tree_could_trap_p (t);
2854 return false;
2857 /* Return true if STMT can throw an exception that is not caught within
2858 the current function (CFUN). */
2860 bool
2861 stmt_can_throw_external (gimple stmt)
2863 int lp_nr;
2865 if (!stmt_could_throw_p (stmt))
2866 return false;
2868 lp_nr = lookup_stmt_eh_lp (stmt);
2869 return lp_nr == 0;
2872 /* Return true if STMT can throw an exception that is caught within
2873 the current function (CFUN). */
2875 bool
2876 stmt_can_throw_internal (gimple stmt)
2878 int lp_nr;
2880 if (!stmt_could_throw_p (stmt))
2881 return false;
2883 lp_nr = lookup_stmt_eh_lp (stmt);
2884 return lp_nr > 0;
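/* Note the landing pad number convention the two predicates above
   rely on: zero means the exception escapes the function, positive
   numbers name real landing pads, and negative numbers denote
   MUST_NOT_THROW regions. */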
2887 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2888 remove any entry it might have from the EH table. Return true if
2889 any change was made. */
2891 bool
2892 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2894 if (stmt_could_throw_p (stmt))
2895 return false;
2896 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2899 /* Likewise, but always use the current function. */
2901 bool
2902 maybe_clean_eh_stmt (gimple stmt)
2904 return maybe_clean_eh_stmt_fn (cfun, stmt);
2907 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2908 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2909 in the table if it should be in there. Return TRUE if a replacement was
2910 done that may require an EH edge purge. */
2912 bool
2913 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2915 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2917 if (lp_nr != 0)
2919 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2921 if (new_stmt == old_stmt && new_stmt_could_throw)
2922 return false;
2924 remove_stmt_from_eh_lp (old_stmt);
2925 if (new_stmt_could_throw)
2927 add_stmt_to_eh_lp (new_stmt, lp_nr);
2928 return false;
2930 else
2931 return true;
2934 return false;
2937 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2938 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2939 operand is the return value of duplicate_eh_regions. */
2941 bool
2942 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2943 struct function *old_fun, gimple old_stmt,
2944 hash_map<void *, void *> *map,
2945 int default_lp_nr)
2947 int old_lp_nr, new_lp_nr;
2949 if (!stmt_could_throw_p (new_stmt))
2950 return false;
2952 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2953 if (old_lp_nr == 0)
2955 if (default_lp_nr == 0)
2956 return false;
2957 new_lp_nr = default_lp_nr;
2959 else if (old_lp_nr > 0)
2961 eh_landing_pad old_lp, new_lp;
2963 old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
2964 new_lp = static_cast<eh_landing_pad> (*map->get (old_lp));
2965 new_lp_nr = new_lp->index;
2967 else
2969 eh_region old_r, new_r;
2971 old_r = (*old_fun->eh->region_array)[-old_lp_nr];
2972 new_r = static_cast<eh_region> (*map->get (old_r));
2973 new_lp_nr = -new_r->index;
2976 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2977 return true;
2980 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2981 and thus no remapping is required. */
2983 bool
2984 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2986 int lp_nr;
2988 if (!stmt_could_throw_p (new_stmt))
2989 return false;
2991 lp_nr = lookup_stmt_eh_lp (old_stmt);
2992 if (lp_nr == 0)
2993 return false;
2995 add_stmt_to_eh_lp (new_stmt, lp_nr);
2996 return true;
2999 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
3000 GIMPLE_TRY) that are similar enough to be considered the same. Currently
3001 this only handles handlers consisting of a single call, as that's the
3002 important case for C++: a destructor call for a particular object showing
3003 up in multiple handlers. */
3005 static bool
3006 same_handler_p (gimple_seq oneh, gimple_seq twoh)
3008 gimple_stmt_iterator gsi;
3009 gimple ones, twos;
3010 unsigned int ai;
3012 gsi = gsi_start (oneh);
3013 if (!gsi_one_before_end_p (gsi))
3014 return false;
3015 ones = gsi_stmt (gsi);
3017 gsi = gsi_start (twoh);
3018 if (!gsi_one_before_end_p (gsi))
3019 return false;
3020 twos = gsi_stmt (gsi);
3022 if (!is_gimple_call (ones)
3023 || !is_gimple_call (twos)
3024 || gimple_call_lhs (ones)
3025 || gimple_call_lhs (twos)
3026 || gimple_call_chain (ones)
3027 || gimple_call_chain (twos)
3028 || !gimple_call_same_target_p (ones, twos)
3029 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
3030 return false;
3032 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
3033 if (!operand_equal_p (gimple_call_arg (ones, ai),
3034 gimple_call_arg (twos, ai), 0))
3035 return false;
3037 return true;
3040 /* Optimize
3041 try { A() } finally { try { ~B() } catch { ~A() } }
3042 try { ... } finally { ~A() }
3043 into
3044 try { A() } catch { ~B() }
3045 try { ~B() ... } finally { ~A() }
3047 This occurs frequently in C++, where A is a local variable and B is a
3048 temporary used in the initializer for A. */
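/* In C++ terms, a rough example: for "A a = A (B ());" the temporary
   B must be destroyed whether or not A's construction completes, so
   ~A appears both in the handler guarding ~B and in the cleanup for
   the rest of the scope; since the two handlers match, the merge
   above applies. */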
3050 static void
3051 optimize_double_finally (gtry *one, gtry *two)
3053 gimple oneh;
3054 gimple_stmt_iterator gsi;
3055 gimple_seq cleanup;
3057 cleanup = gimple_try_cleanup (one);
3058 gsi = gsi_start (cleanup);
3059 if (!gsi_one_before_end_p (gsi))
3060 return;
3062 oneh = gsi_stmt (gsi);
3063 if (gimple_code (oneh) != GIMPLE_TRY
3064 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
3065 return;
3067 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
3069 gimple_seq seq = gimple_try_eval (oneh);
3071 gimple_try_set_cleanup (one, seq);
3072 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
3073 seq = copy_gimple_seq_and_replace_locals (seq);
3074 gimple_seq_add_seq (&seq, gimple_try_eval (two));
3075 gimple_try_set_eval (two, seq);
3079 /* Perform EH refactoring optimizations that are simpler to do when code
3080 flow has been lowered but EH structures haven't. */
3082 static void
3083 refactor_eh_r (gimple_seq seq)
3085 gimple_stmt_iterator gsi;
3086 gimple one, two;
3088 one = NULL;
3089 two = NULL;
3090 gsi = gsi_start (seq);
3091 while (1)
3093 one = two;
3094 if (gsi_end_p (gsi))
3095 two = NULL;
3096 else
3097 two = gsi_stmt (gsi);
3098 if (one && two)
3099 if (gtry *try_one = dyn_cast <gtry *> (one))
3100 if (gtry *try_two = dyn_cast <gtry *> (two))
3101 if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
3102 && gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
3103 optimize_double_finally (try_one, try_two);
3104 if (one)
3105 switch (gimple_code (one))
3107 case GIMPLE_TRY:
3108 refactor_eh_r (gimple_try_eval (one));
3109 refactor_eh_r (gimple_try_cleanup (one));
3110 break;
3111 case GIMPLE_CATCH:
3112 refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
3113 break;
3114 case GIMPLE_EH_FILTER:
3115 refactor_eh_r (gimple_eh_filter_failure (one));
3116 break;
3117 case GIMPLE_EH_ELSE:
3119 geh_else *eh_else_stmt = as_a <geh_else *> (one);
3120 refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
3121 refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
3123 break;
3124 default:
3125 break;
3127 if (two)
3128 gsi_next (&gsi);
3129 else
3130 break;
3134 namespace {
3136 const pass_data pass_data_refactor_eh =
3138 GIMPLE_PASS, /* type */
3139 "ehopt", /* name */
3140 OPTGROUP_NONE, /* optinfo_flags */
3141 TV_TREE_EH, /* tv_id */
3142 PROP_gimple_lcf, /* properties_required */
3143 0, /* properties_provided */
3144 0, /* properties_destroyed */
3145 0, /* todo_flags_start */
3146 0, /* todo_flags_finish */
3149 class pass_refactor_eh : public gimple_opt_pass
3151 public:
3152 pass_refactor_eh (gcc::context *ctxt)
3153 : gimple_opt_pass (pass_data_refactor_eh, ctxt)
3156 /* opt_pass methods: */
3157 virtual bool gate (function *) { return flag_exceptions != 0; }
3158 virtual unsigned int execute (function *)
3160 refactor_eh_r (gimple_body (current_function_decl));
3161 return 0;
3164 }; // class pass_refactor_eh
3166 } // anon namespace
3168 gimple_opt_pass *
3169 make_pass_refactor_eh (gcc::context *ctxt)
3171 return new pass_refactor_eh (ctxt);
3174 /* At the end of gimple optimization, we can lower RESX. */
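/* In outline (a summary of the cases handled below): a resx with no
   source region becomes a __builtin_trap call; with a known
   destination region the exception values are copied across and
   control falls through toward the landing pad; otherwise the
   exception escapes via _Unwind_Resume (or __cxa_end_cleanup on
   ARM EABI). */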
3176 static bool
3177 lower_resx (basic_block bb, gresx *stmt,
3178 hash_map<eh_region, tree> *mnt_map)
3180 int lp_nr;
3181 eh_region src_r, dst_r;
3182 gimple_stmt_iterator gsi;
3183 gimple x;
3184 tree fn, src_nr;
3185 bool ret = false;
3187 lp_nr = lookup_stmt_eh_lp (stmt);
3188 if (lp_nr != 0)
3189 dst_r = get_eh_region_from_lp_number (lp_nr);
3190 else
3191 dst_r = NULL;
3193 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
3194 gsi = gsi_last_bb (bb);
3196 if (src_r == NULL)
3198 /* We can wind up with no source region when pass_cleanup_eh shows
3199 that there are no entries into an eh region and deletes it, but
3200 then the block that contains the resx isn't removed. This can
3201 happen without optimization when the switch statement created by
3202 lower_try_finally_switch isn't simplified to remove the eh case.
3204 Resolve this by expanding the resx node to an abort. */
3206 fn = builtin_decl_implicit (BUILT_IN_TRAP);
3207 x = gimple_build_call (fn, 0);
3208 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3210 while (EDGE_COUNT (bb->succs) > 0)
3211 remove_edge (EDGE_SUCC (bb, 0));
3213 else if (dst_r)
3215 /* When we have a destination region, we resolve this by copying
3216 the excptr and filter values into place, and changing the edge
3217 to immediately after the landing pad. */
3218 edge e;
3220 if (lp_nr < 0)
3222 basic_block new_bb;
3223 tree lab;
3225 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
3226 the failure decl into a new block, if needed. */
3227 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
3229 tree *slot = mnt_map->get (dst_r);
3230 if (slot == NULL)
3232 gimple_stmt_iterator gsi2;
3234 new_bb = create_empty_bb (bb);
3235 add_bb_to_loop (new_bb, bb->loop_father);
3236 lab = gimple_block_label (new_bb);
3237 gsi2 = gsi_start_bb (new_bb);
3239 fn = dst_r->u.must_not_throw.failure_decl;
3240 x = gimple_build_call (fn, 0);
3241 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3242 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3244 mnt_map->put (dst_r, lab);
3246 else
3248 lab = *slot;
3249 new_bb = label_to_block (lab);
3252 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3253 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
3254 e->count = bb->count;
3255 e->probability = REG_BR_PROB_BASE;
3257 else
3259 edge_iterator ei;
3260 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
3262 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
3263 src_nr = build_int_cst (integer_type_node, src_r->index);
3264 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3265 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3267 /* Update the flags for the outgoing edge. */
3268 e = single_succ_edge (bb);
3269 gcc_assert (e->flags & EDGE_EH);
3270 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3272 /* If there are no more EH users of the landing pad, delete it. */
3273 FOR_EACH_EDGE (e, ei, e->dest->preds)
3274 if (e->flags & EDGE_EH)
3275 break;
3276 if (e == NULL)
3278 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3279 remove_eh_landing_pad (lp);
3283 ret = true;
3285 else
3287 tree var;
3289 /* When we don't have a destination region, this exception escapes
3290 up the call chain. We resolve this by generating a call to the
3291 _Unwind_Resume library function. */
3293 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3294 with no arguments for C++ and Java. Check for that. */
3295 if (src_r->use_cxa_end_cleanup)
3297 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3298 x = gimple_build_call (fn, 0);
3299 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3301 else
3303 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3304 src_nr = build_int_cst (integer_type_node, src_r->index);
3305 x = gimple_build_call (fn, 1, src_nr);
3306 var = create_tmp_var (ptr_type_node);
3307 var = make_ssa_name (var, x);
3308 gimple_call_set_lhs (x, var);
3309 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3311 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3312 x = gimple_build_call (fn, 1, var);
3313 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3316 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3319 gsi_remove (&gsi, true);
3321 return ret;
3324 namespace {
3326 const pass_data pass_data_lower_resx =
3328 GIMPLE_PASS, /* type */
3329 "resx", /* name */
3330 OPTGROUP_NONE, /* optinfo_flags */
3331 TV_TREE_EH, /* tv_id */
3332 PROP_gimple_lcf, /* properties_required */
3333 0, /* properties_provided */
3334 0, /* properties_destroyed */
3335 0, /* todo_flags_start */
3336 0, /* todo_flags_finish */
3339 class pass_lower_resx : public gimple_opt_pass
3341 public:
3342 pass_lower_resx (gcc::context *ctxt)
3343 : gimple_opt_pass (pass_data_lower_resx, ctxt)
3346 /* opt_pass methods: */
3347 virtual bool gate (function *) { return flag_exceptions != 0; }
3348 virtual unsigned int execute (function *);
3350 }; // class pass_lower_resx
3352 unsigned
3353 pass_lower_resx::execute (function *fun)
3355 basic_block bb;
3356 bool dominance_invalidated = false;
3357 bool any_rewritten = false;
3359 hash_map<eh_region, tree> mnt_map;
3361 FOR_EACH_BB_FN (bb, fun)
3363 gimple last = last_stmt (bb);
3364 if (last && is_gimple_resx (last))
3366 dominance_invalidated |=
3367 lower_resx (bb, as_a <gresx *> (last), &mnt_map);
3368 any_rewritten = true;
3372 if (dominance_invalidated)
3374 free_dominance_info (CDI_DOMINATORS);
3375 free_dominance_info (CDI_POST_DOMINATORS);
3378 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3381 } // anon namespace
3383 gimple_opt_pass *
3384 make_pass_lower_resx (gcc::context *ctxt)
3386 return new pass_lower_resx (ctxt);
3389 /* Try to optimize var = {v} {CLOBBER} stmts followed just by
3390 external throw. */
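/* A clobber such as "x = {v} {CLOBBER};" merely marks the end of X's
   storage lifetime; when the block ends in a resx that throws
   externally those marks are dead and can simply be removed (sketch,
   not dump output). */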
3392 static void
3393 optimize_clobbers (basic_block bb)
3395 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3396 bool any_clobbers = false;
3397 bool seen_stack_restore = false;
3398 edge_iterator ei;
3399 edge e;
3401 /* Only optimize anything if the bb contains at least one clobber,
3402 ends with resx (checked by caller), optionally contains some
3403 debug stmts or labels, at most one __builtin_stack_restore
3404 call, and has an incoming EH edge. */
3405 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3407 gimple stmt = gsi_stmt (gsi);
3408 if (is_gimple_debug (stmt))
3409 continue;
3410 if (gimple_clobber_p (stmt))
3412 any_clobbers = true;
3413 continue;
3415 if (!seen_stack_restore
3416 && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
3418 seen_stack_restore = true;
3419 continue;
3421 if (gimple_code (stmt) == GIMPLE_LABEL)
3422 break;
3423 return;
3425 if (!any_clobbers)
3426 return;
3427 FOR_EACH_EDGE (e, ei, bb->preds)
3428 if (e->flags & EDGE_EH)
3429 break;
3430 if (e == NULL)
3431 return;
3432 gsi = gsi_last_bb (bb);
3433 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3435 gimple stmt = gsi_stmt (gsi);
3436 if (!gimple_clobber_p (stmt))
3437 continue;
3438 unlink_stmt_vdef (stmt);
3439 gsi_remove (&gsi, true);
3440 release_defs (stmt);
3444 /* Try to sink var = {v} {CLOBBER} stmts followed just by
3445 internal throw to successor BB. */
3447 static int
3448 sink_clobbers (basic_block bb)
3450 edge e;
3451 edge_iterator ei;
3452 gimple_stmt_iterator gsi, dgsi;
3453 basic_block succbb;
3454 bool any_clobbers = false;
3455 unsigned todo = 0;
3457 /* Only optimize if BB has a single EH successor and
3458 all predecessor edges are EH too. */
3459 if (!single_succ_p (bb)
3460 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3461 return 0;
3463 FOR_EACH_EDGE (e, ei, bb->preds)
3465 if ((e->flags & EDGE_EH) == 0)
3466 return 0;
3469 /* And BB contains only CLOBBER stmts before the final
3470 RESX. */
3471 gsi = gsi_last_bb (bb);
3472 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3474 gimple stmt = gsi_stmt (gsi);
3475 if (is_gimple_debug (stmt))
3476 continue;
3477 if (gimple_code (stmt) == GIMPLE_LABEL)
3478 break;
3479 if (!gimple_clobber_p (stmt))
3480 return 0;
3481 any_clobbers = true;
3483 if (!any_clobbers)
3484 return 0;
3486 edge succe = single_succ_edge (bb);
3487 succbb = succe->dest;
3489 /* See if there is a virtual PHI node to take an updated virtual
3490 operand from. */
3491 gphi *vphi = NULL;
3492 tree vuse = NULL_TREE;
3493 for (gphi_iterator gpi = gsi_start_phis (succbb);
3494 !gsi_end_p (gpi); gsi_next (&gpi))
3496 tree res = gimple_phi_result (gpi.phi ());
3497 if (virtual_operand_p (res))
3499 vphi = gpi.phi ();
3500 vuse = res;
3501 break;
3505 dgsi = gsi_after_labels (succbb);
3506 gsi = gsi_last_bb (bb);
3507 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3509 gimple stmt = gsi_stmt (gsi);
3510 tree lhs;
3511 if (is_gimple_debug (stmt))
3512 continue;
3513 if (gimple_code (stmt) == GIMPLE_LABEL)
3514 break;
3515 lhs = gimple_assign_lhs (stmt);
3516 /* Unfortunately we don't have dominance info updated at this
3517 point, so checking if
3518 dominated_by_p (CDI_DOMINATORS, succbb,
3519 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0)))
3520 would be too costly. Thus, avoid sinking any clobbers that
3521 refer to non-(D) SSA_NAMEs. */
3522 if (TREE_CODE (lhs) == MEM_REF
3523 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
3524 && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0)))
3526 unlink_stmt_vdef (stmt);
3527 gsi_remove (&gsi, true);
3528 release_defs (stmt);
3529 continue;
3532 /* As we do not change stmt order when sinking across a
3533 forwarder edge we can keep virtual operands in place. */
3534 gsi_remove (&gsi, false);
3535 gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);
3537 /* But adjust virtual operands if we sunk across a PHI node. */
3538 if (vuse)
3540 gimple use_stmt;
3541 imm_use_iterator iter;
3542 use_operand_p use_p;
3543 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
3544 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3545 SET_USE (use_p, gimple_vdef (stmt));
3546 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
3548 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
3549 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
3551 /* Adjust the incoming virtual operand. */
3552 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
3553 SET_USE (gimple_vuse_op (stmt), vuse);
3555 /* If there is more than one predecessor but no virtual PHI node,
3556 arrange for the virtual operands to be renamed. */
3557 else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
3558 && !single_pred_p (succbb))
3560 /* In this case there will be no use of the VDEF of this stmt.
3561 ??? Unless this is a secondary opportunity and we have not
3562 removed unreachable blocks yet, so we cannot assert this.
3563 Which also means we will end up renaming too many times. */
3564 SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
3565 mark_virtual_operands_for_renaming (cfun);
3566 todo |= TODO_update_ssa_only_virtuals;
3570 return todo;
3573 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3574 we have found some duplicate labels and removed some edges. */
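/* In outline (a sketch of the lowering below, not literal dump
   output), an ERT_TRY dispatch becomes

     filter = __builtin_eh_filter (region_nr);
     switch (filter) { case <type filter>: goto <handler>; ... }

   while an ERT_ALLOWED_EXCEPTIONS dispatch tests the same filter
   value with a two-way conditional. */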
3576 static bool
3577 lower_eh_dispatch (basic_block src, geh_dispatch *stmt)
3579 gimple_stmt_iterator gsi;
3580 int region_nr;
3581 eh_region r;
3582 tree filter, fn;
3583 gimple x;
3584 bool redirected = false;
3586 region_nr = gimple_eh_dispatch_region (stmt);
3587 r = get_eh_region_from_number (region_nr);
3589 gsi = gsi_last_bb (src);
3591 switch (r->type)
3593 case ERT_TRY:
3595 auto_vec<tree> labels;
3596 tree default_label = NULL;
3597 eh_catch c;
3598 edge_iterator ei;
3599 edge e;
3600 hash_set<tree> seen_values;
3602 /* Collect the labels for a switch. Zero the post_landing_pad
3603 field because we'll no longer have anything keeping these labels
3604 in existence and the optimizer will be free to merge these
3605 blocks at will. */
3606 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3608 tree tp_node, flt_node, lab = c->label;
3609 bool have_label = false;
3611 c->label = NULL;
3612 tp_node = c->type_list;
3613 flt_node = c->filter_list;
3615 if (tp_node == NULL)
3617 default_label = lab;
3618 break;
3622 /* Filter out duplicate labels that arise when this handler
3623 is shadowed by an earlier one. When no labels are
3624 attached to the handler anymore, we remove
3625 the corresponding edge and then we delete unreachable
3626 blocks at the end of this pass. */
3627 if (! seen_values.contains (TREE_VALUE (flt_node)))
3629 tree t = build_case_label (TREE_VALUE (flt_node),
3630 NULL, lab);
3631 labels.safe_push (t);
3632 seen_values.add (TREE_VALUE (flt_node));
3633 have_label = true;
3636 tp_node = TREE_CHAIN (tp_node);
3637 flt_node = TREE_CHAIN (flt_node);
3639 while (tp_node);
3640 if (! have_label)
3642 remove_edge (find_edge (src, label_to_block (lab)));
3643 redirected = true;
3647 /* Clean up the edge flags. */
3648 FOR_EACH_EDGE (e, ei, src->succs)
3650 if (e->flags & EDGE_FALLTHRU)
3652 /* If there was no catch-all, use the fallthru edge. */
3653 if (default_label == NULL)
3654 default_label = gimple_block_label (e->dest);
3655 e->flags &= ~EDGE_FALLTHRU;
3658 gcc_assert (default_label != NULL);
3660 /* Don't generate a switch if there's only a default case.
3661 This is common in the form of try { A; } catch (...) { B; }. */
3662 if (!labels.exists ())
3664 e = single_succ_edge (src);
3665 e->flags |= EDGE_FALLTHRU;
3667 else
3669 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3670 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3671 region_nr));
3672 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
3673 filter = make_ssa_name (filter, x);
3674 gimple_call_set_lhs (x, filter);
3675 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3677 /* Turn the default label into a default case. */
3678 default_label = build_case_label (NULL, NULL, default_label);
3679 sort_case_labels (labels);
3681 x = gimple_build_switch (filter, default_label, labels);
3682 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3685 break;
3687 case ERT_ALLOWED_EXCEPTIONS:
3689 edge b_e = BRANCH_EDGE (src);
3690 edge f_e = FALLTHRU_EDGE (src);
3692 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3693 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3694 region_nr));
3695 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
3696 filter = make_ssa_name (filter, x);
3697 gimple_call_set_lhs (x, filter);
3698 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3700 r->u.allowed.label = NULL;
3701 x = gimple_build_cond (EQ_EXPR, filter,
3702 build_int_cst (TREE_TYPE (filter),
3703 r->u.allowed.filter),
3704 NULL_TREE, NULL_TREE);
3705 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3707 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3708 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3710 break;
3712 default:
3713 gcc_unreachable ();
3716 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3717 gsi_remove (&gsi, true);
3718 return redirected;
3721 namespace {
3723 const pass_data pass_data_lower_eh_dispatch =
3725 GIMPLE_PASS, /* type */
3726 "ehdisp", /* name */
3727 OPTGROUP_NONE, /* optinfo_flags */
3728 TV_TREE_EH, /* tv_id */
3729 PROP_gimple_lcf, /* properties_required */
3730 0, /* properties_provided */
3731 0, /* properties_destroyed */
3732 0, /* todo_flags_start */
3733 0, /* todo_flags_finish */
3736 class pass_lower_eh_dispatch : public gimple_opt_pass
3738 public:
3739 pass_lower_eh_dispatch (gcc::context *ctxt)
3740 : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt)
3743 /* opt_pass methods: */
3744 virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; }
3745 virtual unsigned int execute (function *);
3747 }; // class pass_lower_eh_dispatch
3749 unsigned
3750 pass_lower_eh_dispatch::execute (function *fun)
3752 basic_block bb;
3753 int flags = 0;
3754 bool redirected = false;
3756 assign_filter_values ();
3758 FOR_EACH_BB_FN (bb, fun)
3760 gimple last = last_stmt (bb);
3761 if (last == NULL)
3762 continue;
3763 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
3765 redirected |= lower_eh_dispatch (bb,
3766 as_a <geh_dispatch *> (last));
3767 flags |= TODO_update_ssa_only_virtuals;
3769 else if (gimple_code (last) == GIMPLE_RESX)
3771 if (stmt_can_throw_external (last))
3772 optimize_clobbers (bb);
3773 else
3774 flags |= sink_clobbers (bb);
3778 if (redirected)
3779 delete_unreachable_blocks ();
3780 return flags;
3783 } // anon namespace
3785 gimple_opt_pass *
3786 make_pass_lower_eh_dispatch (gcc::context *ctxt)
3788 return new pass_lower_eh_dispatch (ctxt);
3791 /* Walk statements, see what regions and, optionally, landing pads
3792 are really referenced.
3794 Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
3795 and in LP_REACHABLE an sbitmap with bits set for reachable landing pads.
3797 Passing NULL for LP_REACHABLE is valid, in this case only reachable
3798 regions are marked.
3800 The caller is responsible for freeing the returned sbitmaps. */
3802 static void
3803 mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
3805 sbitmap r_reachable, lp_reachable;
3806 basic_block bb;
3807 bool mark_landing_pads = (lp_reachablep != NULL);
3808 gcc_checking_assert (r_reachablep != NULL);
3810 r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
3811 bitmap_clear (r_reachable);
3812 *r_reachablep = r_reachable;
3814 if (mark_landing_pads)
3816 lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
3817 bitmap_clear (lp_reachable);
3818 *lp_reachablep = lp_reachable;
3820 else
3821 lp_reachable = NULL;
3823 FOR_EACH_BB_FN (bb, cfun)
3825 gimple_stmt_iterator gsi;
3827 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3829 gimple stmt = gsi_stmt (gsi);
3831 if (mark_landing_pads)
3833 int lp_nr = lookup_stmt_eh_lp (stmt);
3835 /* Negative LP numbers are MUST_NOT_THROW regions which
3836 are not considered BB enders. */
3837 if (lp_nr < 0)
3838 bitmap_set_bit (r_reachable, -lp_nr);
3840 /* Positive LP numbers are real landing pads, and BB enders. */
3841 else if (lp_nr > 0)
3843 gcc_assert (gsi_one_before_end_p (gsi));
3844 eh_region region = get_eh_region_from_lp_number (lp_nr);
3845 bitmap_set_bit (r_reachable, region->index);
3846 bitmap_set_bit (lp_reachable, lp_nr);
3850 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3851 switch (gimple_code (stmt))
3853 case GIMPLE_RESX:
3854 bitmap_set_bit (r_reachable,
3855 gimple_resx_region (as_a <gresx *> (stmt)));
3856 break;
3857 case GIMPLE_EH_DISPATCH:
3858 bitmap_set_bit (r_reachable,
3859 gimple_eh_dispatch_region (
3860 as_a <geh_dispatch *> (stmt)));
3861 break;
3862 case GIMPLE_CALL:
3863 if (gimple_call_builtin_p (stmt, BUILT_IN_EH_COPY_VALUES))
3864 for (int i = 0; i < 2; ++i)
3866 tree rt = gimple_call_arg (stmt, i);
3867 HOST_WIDE_INT ri = tree_to_shwi (rt);
3869 gcc_assert (ri == (int) ri);
3870 bitmap_set_bit (r_reachable, ri);
3872 break;
3873 default:
3874 break;
3880 /* Remove unreachable handlers and unreachable landing pads. */
3882 static void
3883 remove_unreachable_handlers (void)
3885 sbitmap r_reachable, lp_reachable;
3886 eh_region region;
3887 eh_landing_pad lp;
3888 unsigned i;
3890 mark_reachable_handlers (&r_reachable, &lp_reachable);
3892 if (dump_file)
3894 fprintf (dump_file, "Before removal of unreachable regions:\n");
3895 dump_eh_tree (dump_file, cfun);
3896 fprintf (dump_file, "Reachable regions: ");
3897 dump_bitmap_file (dump_file, r_reachable);
3898 fprintf (dump_file, "Reachable landing pads: ");
3899 dump_bitmap_file (dump_file, lp_reachable);
3902 if (dump_file)
3904 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3905 if (region && !bitmap_bit_p (r_reachable, region->index))
3906 fprintf (dump_file,
3907 "Removing unreachable region %d\n",
3908 region->index);
3911 remove_unreachable_eh_regions (r_reachable);
3913 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3914 if (lp && !bitmap_bit_p (lp_reachable, lp->index))
3916 if (dump_file)
3917 fprintf (dump_file,
3918 "Removing unreachable landing pad %d\n",
3919 lp->index);
3920 remove_eh_landing_pad (lp);
3923 if (dump_file)
3925 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3926 dump_eh_tree (dump_file, cfun);
3927 fprintf (dump_file, "\n\n");
3930 sbitmap_free (r_reachable);
3931 sbitmap_free (lp_reachable);
3933 #ifdef ENABLE_CHECKING
3934 verify_eh_tree (cfun);
3935 #endif
3938 /* Remove unreachable handlers if any landing pads have been removed after
3939 the last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
3941 void
3942 maybe_remove_unreachable_handlers (void)
3944 eh_landing_pad lp;
3945 unsigned i;
3947 if (cfun->eh == NULL)
3948 return;
3950 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3951 if (lp && lp->post_landing_pad)
3953 if (label_to_block (lp->post_landing_pad) == NULL)
3955 remove_unreachable_handlers ();
3956 return;
3961 /* Remove regions that do not have landing pads. This assumes
3962 that remove_unreachable_handlers has already been run, and
3963 that we've just manipulated the landing pads since then.
3965 Preserve regions with landing pads and regions that prevent
3966 exceptions from propagating further, even if these regions
3967 are not reachable. */
3969 static void
3970 remove_unreachable_handlers_no_lp (void)
3972 eh_region region;
3973 sbitmap r_reachable;
3974 unsigned i;
3976 mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);
3978 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3980 if (! region)
3981 continue;
3983 if (region->landing_pads != NULL
3984 || region->type == ERT_MUST_NOT_THROW)
3985 bitmap_set_bit (r_reachable, region->index);
3987 if (dump_file
3988 && !bitmap_bit_p (r_reachable, region->index))
3989 fprintf (dump_file,
3990 "Removing unreachable region %d\n",
3991 region->index);
3994 remove_unreachable_eh_regions (r_reachable);
3996 sbitmap_free (r_reachable);
3999 /* Undo critical edge splitting on an EH landing pad. Earlier, we
4000 optimistically split all sorts of edges, including EH edges. The
4001 optimization passes in between may not have needed them; if not,
4002 we should undo the split.
4004 Recognize this case by having one EH edge incoming to the BB and
4005 one normal edge outgoing; BB should be empty apart from the
4006 post_landing_pad label.
4008 Note that this is slightly different from the empty handler case
4009 handled by cleanup_empty_eh, in that the actual handler may yet
4010 have actual code but the landing pad has been separated from the
4011 handler. As such, cleanup_empty_eh relies on this transformation
4012 having been done first. */
4014 static bool
4015 unsplit_eh (eh_landing_pad lp)
4017 basic_block bb = label_to_block (lp->post_landing_pad);
4018 gimple_stmt_iterator gsi;
4019 edge e_in, e_out;
4021 /* Quickly check the edge counts on BB for singularity. */
4022 if (!single_pred_p (bb) || !single_succ_p (bb))
4023 return false;
4024 e_in = single_pred_edge (bb);
4025 e_out = single_succ_edge (bb);
4027 /* Input edge must be EH and output edge must be normal. */
4028 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
4029 return false;
4031 /* The block must be empty except for the labels and debug insns. */
4032 gsi = gsi_after_labels (bb);
4033 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4034 gsi_next_nondebug (&gsi);
4035 if (!gsi_end_p (gsi))
4036 return false;
4038 /* The destination block must not already have a landing pad
4039 for a different region. */
4040 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4042 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
4043 tree lab;
4044 int lp_nr;
4046 if (!label_stmt)
4047 break;
4048 lab = gimple_label_label (label_stmt);
4049 lp_nr = EH_LANDING_PAD_NR (lab);
4050 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4051 return false;
4054 /* The new destination block must not already be a destination of
4055 the source block, lest we merge fallthru and eh edges and get
4056 all sorts of confusion. */
4057 if (find_edge (e_in->src, e_out->dest))
4058 return false;
4060 /* ??? We can get degenerate phis due to cfg cleanups. I would have
4061 thought this should have been cleaned up by a phicprop pass, but
4062 that doesn't appear to handle virtuals. Propagate by hand. */
4063 if (!gimple_seq_empty_p (phi_nodes (bb)))
4065 for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
4067 gimple use_stmt;
4068 gphi *phi = gpi.phi ();
4069 tree lhs = gimple_phi_result (phi);
4070 tree rhs = gimple_phi_arg_def (phi, 0);
4071 use_operand_p use_p;
4072 imm_use_iterator iter;
4074 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
4076 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
4077 SET_USE (use_p, rhs);
4080 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4081 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
4083 remove_phi_node (&gpi, true);
4087 if (dump_file && (dump_flags & TDF_DETAILS))
4088 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
4089 lp->index, e_out->dest->index);
4091 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
4092 a successor edge, humor it. But do the real CFG change with the
4093 predecessor of E_OUT in order to preserve the ordering of arguments
4094 to the PHI nodes in E_OUT->DEST. */
4095 redirect_eh_edge_1 (e_in, e_out->dest, false);
4096 redirect_edge_pred (e_out, e_in->src);
4097 e_out->flags = e_in->flags;
4098 e_out->probability = e_in->probability;
4099 e_out->count = e_in->count;
4100 remove_edge (e_in);
4102 return true;
4105 /* Examine each landing pad block and see if it matches unsplit_eh. */
4107 static bool
4108 unsplit_all_eh (void)
4110 bool changed = false;
4111 eh_landing_pad lp;
4112 int i;
4114 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
4115 if (lp)
4116 changed |= unsplit_eh (lp);
4118 return changed;
4121 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
4122 to OLD_BB to NEW_BB; return true on success, false on failure.
4124 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
4125 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
4126 Virtual PHIs may be deleted and marked for renaming. */
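/* A minimal sketch with hypothetical SSA names:

     OLD_BB:  x_1 = PHI <x_2(E1), x_3(E2)>       E1, E2: EH edges
     NEW_BB:  y_4 = PHI <..., x_1(OLD_BB_OUT)>

   Once E1 and E2 are redirected to NEW_BB, y_4 must receive x_2 and
   x_3 on those edges.  The edge_var_map entries built below record
   exactly that, and flush_pending_stmts installs them as each edge
   is moved.  */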
4128 static bool
4129 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
4130 edge old_bb_out, bool change_region)
4132 gphi_iterator ngsi, ogsi;
4133 edge_iterator ei;
4134 edge e;
4135 bitmap ophi_handled;
4137 /* The destination block must not be a regular successor for any
4138 of the preds of the landing pad. Thus, avoid turning
4139    <..>
4140    |  \ EH
4141    |  <..>
4142    |  /
4143   <..>
4144 into
4145    <..>
4146    |  | EH
4147     <..>
4148 which CFG verification would choke on. See PR45172 and PR51089. */
4149 FOR_EACH_EDGE (e, ei, old_bb->preds)
4150 if (find_edge (e->src, new_bb))
4151 return false;
4153 FOR_EACH_EDGE (e, ei, old_bb->preds)
4154 redirect_edge_var_map_clear (e);
4156 ophi_handled = BITMAP_ALLOC (NULL);
4158 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
4159 for the edges we're going to move. */
4160 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
4162 gphi *ophi, *nphi = ngsi.phi ();
4163 tree nresult, nop;
4165 nresult = gimple_phi_result (nphi);
4166 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
4168 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
4169 the source ssa_name. */
4170 ophi = NULL;
4171 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4173 ophi = ogsi.phi ();
4174 if (gimple_phi_result (ophi) == nop)
4175 break;
4176 ophi = NULL;
4179 /* If we did find the corresponding PHI, copy those inputs. */
4180 if (ophi)
4182 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
4183 if (!has_single_use (nop))
4185 imm_use_iterator imm_iter;
4186 use_operand_p use_p;
4188 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
4190 if (!gimple_debug_bind_p (USE_STMT (use_p))
4191 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
4192 || gimple_bb (USE_STMT (use_p)) != new_bb))
4193 goto fail;
4196 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
4197 FOR_EACH_EDGE (e, ei, old_bb->preds)
4199 location_t oloc;
4200 tree oop;
4202 if ((e->flags & EDGE_EH) == 0)
4203 continue;
4204 oop = gimple_phi_arg_def (ophi, e->dest_idx);
4205 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
4206 redirect_edge_var_map_add (e, nresult, oop, oloc);
4209 /* If we didn't find the PHI, then whether it's a real variable or a
4210 VOP, we know from the fact that OLD_BB is tree_empty_eh_handler_p that the
4211 variable is unchanged from input to the block and we can simply
4212 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
4213 else
4215 location_t nloc
4216 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
4217 FOR_EACH_EDGE (e, ei, old_bb->preds)
4218 redirect_edge_var_map_add (e, nresult, nop, nloc);
4222 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
4223 we don't know what values from the other edges into NEW_BB to use. */
4224 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4226 gphi *ophi = ogsi.phi ();
4227 tree oresult = gimple_phi_result (ophi);
4228 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
4229 goto fail;
4232 /* Finally, move the edges and update the PHIs. */
4233 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
4234 if (e->flags & EDGE_EH)
4236 /* ??? CFG manipulation routines do not try to update loop
4237 form on edge redirection. Do so manually here for now. */
4238 /* If we redirect a loop entry or latch edge, that will either create
4239 a multiple-entry loop or rotate the loop. If the loops merge,
4240 we may have created a loop with multiple latches.
4241 None of this is easily fixed, so cancel the affected loop
4242 and mark the other loop as possibly having multiple latches. */
4243 if (e->dest == e->dest->loop_father->header)
4245 mark_loop_for_removal (e->dest->loop_father);
4246 new_bb->loop_father->latch = NULL;
4247 loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
4249 redirect_eh_edge_1 (e, new_bb, change_region);
4250 redirect_edge_succ (e, new_bb);
4251 flush_pending_stmts (e);
4253 else
4254 ei_next (&ei);
4256 BITMAP_FREE (ophi_handled);
4257 return true;
4259 fail:
4260 FOR_EACH_EDGE (e, ei, old_bb->preds)
4261 redirect_edge_var_map_clear (e);
4262 BITMAP_FREE (ophi_handled);
4263 return false;
4266 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
4267 old region to NEW_REGION at BB. */
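/* Unlinking LP below uses the pointer-to-pointer idiom for singly
   linked lists; a standalone sketch of the same technique (HEAD and
   VICTIM are hypothetical names):

     eh_landing_pad *pp;
     for (pp = &head; *pp != victim; pp = &(*pp)->next_lp)
       continue;
     *pp = victim->next_lp;

   Because PP starts at the address of the list head, no special case
   is needed when the victim is the first element.  */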
4269 static void
4270 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
4271 eh_landing_pad lp, eh_region new_region)
4273 gimple_stmt_iterator gsi;
4274 eh_landing_pad *pp;
4276 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
4277 continue;
4278 *pp = lp->next_lp;
4280 lp->region = new_region;
4281 lp->next_lp = new_region->landing_pads;
4282 new_region->landing_pads = lp;
4284 /* Delete the RESX that was matched within the empty handler block. */
4285 gsi = gsi_last_bb (bb);
4286 unlink_stmt_vdef (gsi_stmt (gsi));
4287 gsi_remove (&gsi, true);
4289 /* Clean up E_OUT for the fallthru. */
4290 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
4291 e_out->probability = REG_BR_PROB_BASE;
4294 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
4295 unsplitting than unsplit_eh was prepared to handle, e.g. when
4296 multiple incoming edges and phis are involved. */
4298 static bool
4299 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
4301 gimple_stmt_iterator gsi;
4302 tree lab;
4304 /* We really ought not to have totally lost everything following
4305 a landing pad label. Given that BB is empty, there had better
4306 be a successor. */
4307 gcc_assert (e_out != NULL);
4309 /* The destination block must not already have a landing pad
4310 for a different region. */
4311 lab = NULL;
4312 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4314 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
4315 int lp_nr;
4317 if (!stmt)
4318 break;
4319 lab = gimple_label_label (stmt);
4320 lp_nr = EH_LANDING_PAD_NR (lab);
4321 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4322 return false;
4325 /* Attempt to move the PHIs into the successor block. */
4326 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
4328 if (dump_file && (dump_flags & TDF_DETAILS))
4329 fprintf (dump_file,
4330 "Unsplit EH landing pad %d to block %i "
4331 "(via cleanup_empty_eh).\n",
4332 lp->index, e_out->dest->index);
4333 return true;
4336 return false;
4339 /* Return true if edge E_FIRST is part of an empty infinite loop
4340 or leads to such a loop through a series of single successor
4341 empty bbs. */
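/* Implementation note: the walk below marks visited blocks through
   their AUX fields; reaching an already-marked block means the chain
   of empty single-successor blocks closed into a cycle.  All marks
   are cleared again before returning.  */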
4343 static bool
4344 infinite_empty_loop_p (edge e_first)
4346 bool inf_loop = false;
4347 edge e;
4349 if (e_first->dest == e_first->src)
4350 return true;
4352 e_first->src->aux = (void *) 1;
4353 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4355 gimple_stmt_iterator gsi;
4356 if (e->dest->aux)
4358 inf_loop = true;
4359 break;
4361 e->dest->aux = (void *) 1;
4362 gsi = gsi_after_labels (e->dest);
4363 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4364 gsi_next_nondebug (&gsi);
4365 if (!gsi_end_p (gsi))
4366 break;
4368 e_first->src->aux = NULL;
4369 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4370 e->dest->aux = NULL;
4372 return inf_loop;
4375 /* Examine the block associated with LP to determine if it's an empty
4376 handler for its EH region. If so, attempt to redirect EH edges to
4377 an outer region. Return true if the CFG was updated in any way. This
4378 is similar to jump forwarding, just across EH edges. */
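/* A typical (illustrative) C++ source of such empty handlers:

     struct S { ~S () { } };
     void f () { S s; g (); }

   Once ~S is inlined away, the cleanup region for S keeps a landing
   pad whose only job is to re-raise via RESX; forwarding its EH edges
   to the outer region is the EH analogue of jump forwarding.  */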
4380 static bool
4381 cleanup_empty_eh (eh_landing_pad lp)
4383 basic_block bb = label_to_block (lp->post_landing_pad);
4384 gimple_stmt_iterator gsi;
4385 gimple resx;
4386 eh_region new_region;
4387 edge_iterator ei;
4388 edge e, e_out;
4389 bool has_non_eh_pred;
4390 bool ret = false;
4391 int new_lp_nr;
4393 /* There can be zero or one edges out of BB. This is the quickest test. */
4394 switch (EDGE_COUNT (bb->succs))
4396 case 0:
4397 e_out = NULL;
4398 break;
4399 case 1:
4400 e_out = single_succ_edge (bb);
4401 break;
4402 default:
4403 return false;
4406 resx = last_stmt (bb);
4407 if (resx && is_gimple_resx (resx))
4409 if (stmt_can_throw_external (resx))
4410 optimize_clobbers (bb);
4411 else if (sink_clobbers (bb))
4412 ret = true;
4415 gsi = gsi_after_labels (bb);
4417 /* Make sure to skip debug statements. */
4418 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4419 gsi_next_nondebug (&gsi);
4421 /* If the block is totally empty, look for more unsplitting cases. */
4422 if (gsi_end_p (gsi))
4424 /* For the degenerate case of an infinite loop, bail out.
4425 If BB has no successors and is totally empty, which can happen e.g.
4426 because of an incorrect noreturn attribute, bail out too. */
4427 if (e_out == NULL
4428 || infinite_empty_loop_p (e_out))
4429 return ret;
4431 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
4434 /* The block should consist only of a single RESX statement, modulo a
4435 preceding call to __builtin_stack_restore if there is no outgoing
4436 edge, since the call can be eliminated in this case. */
4437 resx = gsi_stmt (gsi);
4438 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4440 gsi_next (&gsi);
4441 resx = gsi_stmt (gsi);
4443 if (!is_gimple_resx (resx))
4444 return ret;
4445 gcc_assert (gsi_one_before_end_p (gsi));
4447 /* Determine if there are non-EH edges, or resx edges into the handler. */
4448 has_non_eh_pred = false;
4449 FOR_EACH_EDGE (e, ei, bb->preds)
4450 if (!(e->flags & EDGE_EH))
4451 has_non_eh_pred = true;
4453 /* Find the handler enclosing the empty handler by looking at
4454 where the RESX instruction was vectored. */
4455 new_lp_nr = lookup_stmt_eh_lp (resx);
4456 new_region = get_eh_region_from_lp_number (new_lp_nr);
4458 /* If there's no destination region within the current function,
4459 redirection is trivial via removing the throwing statements from
4460 the EH region, removing the EH edges, and allowing the block
4461 to go unreachable. */
4462 if (new_region == NULL)
4464 gcc_assert (e_out == NULL);
4465 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4466 if (e->flags & EDGE_EH)
4468 gimple stmt = last_stmt (e->src);
4469 remove_stmt_from_eh_lp (stmt);
4470 remove_edge (e);
4472 else
4473 ei_next (&ei);
4474 goto succeed;
4477 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4478 to handle the abort and allow the blocks to go unreachable. */
4479 if (new_region->type == ERT_MUST_NOT_THROW)
4481 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4482 if (e->flags & EDGE_EH)
4484 gimple stmt = last_stmt (e->src);
4485 remove_stmt_from_eh_lp (stmt);
4486 add_stmt_to_eh_lp (stmt, new_lp_nr);
4487 remove_edge (e);
4489 else
4490 ei_next (&ei);
4491 goto succeed;
4494 /* Try to redirect the EH edges and merge the PHIs into the destination
4495 landing pad block. If the merge succeeds, we'll already have redirected
4496 all the EH edges. The handler itself will go unreachable if there were
4497 no normal edges. */
4498 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
4499 goto succeed;
4501 /* Finally, if all input edges are EH edges, then we can (potentially)
4502 reduce the number of transfers from the runtime by moving the landing
4503 pad from the original region to the new region. This is a win when
4504 we remove the last CLEANUP region along a particular exception
4505 propagation path. Since nothing changes except for the region with
4506 which the landing pad is associated, the PHI nodes do not need to be
4507 adjusted at all. */
4508 if (!has_non_eh_pred)
4510 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4511 if (dump_file && (dump_flags & TDF_DETAILS))
4512 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4513 lp->index, new_region->index);
4515 /* ??? The CFG didn't change, but we may have rendered the
4516 old EH region unreachable. Trigger a cleanup there. */
4517 return true;
4520 return ret;
4522 succeed:
4523 if (dump_file && (dump_flags & TDF_DETAILS))
4524 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4525 remove_eh_landing_pad (lp);
4526 return true;
4529 /* Do a post-order traversal of the EH region tree. Examine each
4530 post_landing_pad block and see if we can eliminate it as empty. */
4532 static bool
4533 cleanup_all_empty_eh (void)
4535 bool changed = false;
4536 eh_landing_pad lp;
4537 int i;
4539 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
4540 if (lp)
4541 changed |= cleanup_empty_eh (lp);
4543 return changed;
4546 /* Perform cleanups and lowering of exception handling:
4547 1) cleanup regions whose handlers do nothing are optimized out
4548 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
4549 3) info about which regions contain instructions, and which regions
4550 are reachable via local EH edges, is collected
4551 4) the EH tree is pruned of regions that are no longer necessary.
4553 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4554 Unify those that have the same failure decl and locus.
4557 static unsigned int
4558 execute_cleanup_eh_1 (void)
4560 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4561 looking up unreachable landing pads. */
4562 remove_unreachable_handlers ();
4564 /* Watch out for the region tree vanishing because all regions were unreachable. */
4565 if (cfun->eh->region_tree)
4567 bool changed = false;
4569 if (optimize)
4570 changed |= unsplit_all_eh ();
4571 changed |= cleanup_all_empty_eh ();
4573 if (changed)
4575 free_dominance_info (CDI_DOMINATORS);
4576 free_dominance_info (CDI_POST_DOMINATORS);
4578 /* We delayed all basic block deletion, as we may have performed
4579 cleanups on EH edges while non-EH edges were still present. */
4580 delete_unreachable_blocks ();
4582 /* We manipulated the landing pads. Remove any region that no
4583 longer has a landing pad. */
4584 remove_unreachable_handlers_no_lp ();
4586 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4590 return 0;
4593 namespace {
4595 const pass_data pass_data_cleanup_eh =
4597 GIMPLE_PASS, /* type */
4598 "ehcleanup", /* name */
4599 OPTGROUP_NONE, /* optinfo_flags */
4600 TV_TREE_EH, /* tv_id */
4601 PROP_gimple_lcf, /* properties_required */
4602 0, /* properties_provided */
4603 0, /* properties_destroyed */
4604 0, /* todo_flags_start */
4605 0, /* todo_flags_finish */
4608 class pass_cleanup_eh : public gimple_opt_pass
4610 public:
4611 pass_cleanup_eh (gcc::context *ctxt)
4612 : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
4615 /* opt_pass methods: */
4616 opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }
4617 virtual bool gate (function *fun)
4619 return fun->eh != NULL && fun->eh->region_tree != NULL;
4622 virtual unsigned int execute (function *);
4624 }; // class pass_cleanup_eh
4626 unsigned int
4627 pass_cleanup_eh::execute (function *fun)
4629 int ret = execute_cleanup_eh_1 ();
4631 /* If the function no longer needs an EH personality routine,
4632 clear it. This exposes cross-language inlining opportunities
4633 and avoids references to a never defined personality routine. */
4634 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4635 && function_needs_eh_personality (fun) != eh_personality_lang)
4636 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4638 return ret;
4641 } // anon namespace
4643 gimple_opt_pass *
4644 make_pass_cleanup_eh (gcc::context *ctxt)
4646 return new pass_cleanup_eh (ctxt);
4649 /* Verify that BB, which contains STMT as its last statement, has
4650 precisely the edges that make_eh_edges would create. */
4652 DEBUG_FUNCTION bool
4653 verify_eh_edges (gimple stmt)
4655 basic_block bb = gimple_bb (stmt);
4656 eh_landing_pad lp = NULL;
4657 int lp_nr;
4658 edge_iterator ei;
4659 edge e, eh_edge;
4661 lp_nr = lookup_stmt_eh_lp (stmt);
4662 if (lp_nr > 0)
4663 lp = get_eh_landing_pad_from_number (lp_nr);
4665 eh_edge = NULL;
4666 FOR_EACH_EDGE (e, ei, bb->succs)
4668 if (e->flags & EDGE_EH)
4670 if (eh_edge)
4672 error ("BB %i has multiple EH edges", bb->index);
4673 return true;
4675 else
4676 eh_edge = e;
4680 if (lp == NULL)
4682 if (eh_edge)
4684 error ("BB %i cannot throw but has an EH edge", bb->index);
4685 return true;
4687 return false;
4690 if (!stmt_could_throw_p (stmt))
4692 error ("BB %i last statement has an incorrectly set lp", bb->index);
4693 return true;
4696 if (eh_edge == NULL)
4698 error ("BB %i is missing an EH edge", bb->index);
4699 return true;
4702 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4704 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4705 return true;
4708 return false;
4711 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
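/* Sketch of the edge set being verified: an ERT_TRY dispatch should
   have one edge to each catch label, plus a fallthru edge unless one
   of the catches is a catch-all; an ERT_ALLOWED_EXCEPTIONS dispatch
   should have one edge to the failure label plus a fallthru.  The
   code below marks each expected edge in its AUX field and then
   checks that no others exist.  */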
4713 DEBUG_FUNCTION bool
4714 verify_eh_dispatch_edge (geh_dispatch *stmt)
4716 eh_region r;
4717 eh_catch c;
4718 basic_block src, dst;
4719 bool want_fallthru = true;
4720 edge_iterator ei;
4721 edge e, fall_edge;
4723 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4724 src = gimple_bb (stmt);
4726 FOR_EACH_EDGE (e, ei, src->succs)
4727 gcc_assert (e->aux == NULL);
4729 switch (r->type)
4731 case ERT_TRY:
4732 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4734 dst = label_to_block (c->label);
4735 e = find_edge (src, dst);
4736 if (e == NULL)
4738 error ("BB %i is missing an edge", src->index);
4739 return true;
4741 e->aux = (void *)e;
4743 /* A catch-all handler doesn't have a fallthru. */
4744 if (c->type_list == NULL)
4746 want_fallthru = false;
4747 break;
4750 break;
4752 case ERT_ALLOWED_EXCEPTIONS:
4753 dst = label_to_block (r->u.allowed.label);
4754 e = find_edge (src, dst);
4755 if (e == NULL)
4757 error ("BB %i is missing an edge", src->index);
4758 return true;
4760 e->aux = (void *)e;
4761 break;
4763 default:
4764 gcc_unreachable ();
4767 fall_edge = NULL;
4768 FOR_EACH_EDGE (e, ei, src->succs)
4770 if (e->flags & EDGE_FALLTHRU)
4772 if (fall_edge != NULL)
4774 error ("BB %i has too many fallthru edges", src->index);
4775 return true;
4777 fall_edge = e;
4779 else if (e->aux)
4780 e->aux = NULL;
4781 else
4783 error ("BB %i has an incorrect edge", src->index);
4784 return true;
4787 if ((fall_edge != NULL) ^ want_fallthru)
4789 error ("BB %i has an incorrect fallthru edge", src->index);
4790 return true;
4793 return false;