gcc/tree-eh.c
1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "hash-table.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "expr.h"
27 #include "calls.h"
28 #include "flags.h"
29 #include "hashtab.h"
30 #include "hash-set.h"
31 #include "vec.h"
32 #include "machmode.h"
33 #include "hard-reg-set.h"
34 #include "input.h"
35 #include "function.h"
36 #include "except.h"
37 #include "predict.h"
38 #include "dominance.h"
39 #include "cfg.h"
40 #include "cfganal.h"
41 #include "cfgcleanup.h"
42 #include "basic-block.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "tree-eh.h"
46 #include "gimple-expr.h"
47 #include "is-a.h"
48 #include "gimple.h"
49 #include "gimple-iterator.h"
50 #include "gimple-ssa.h"
51 #include "cgraph.h"
52 #include "tree-cfg.h"
53 #include "tree-phinodes.h"
54 #include "ssa-iterators.h"
55 #include "stringpool.h"
56 #include "tree-ssanames.h"
57 #include "tree-into-ssa.h"
58 #include "tree-ssa.h"
59 #include "tree-inline.h"
60 #include "tree-pass.h"
61 #include "langhooks.h"
62 #include "diagnostic-core.h"
63 #include "target.h"
64 #include "cfgloop.h"
65 #include "gimple-low.h"
67 /* In some instances a tree and a gimple need to be stored in the same
68 table, e.g. a hash table.  This union makes that possible. */
69 typedef union {tree *tp; tree t; gimple g;} treemple;
71 /* Misc functions used in this file. */
73 /* Remember and lookup EH landing pad data for arbitrary statements.
74 Really this means any statement that could_throw_p. We could
75 stuff this information into the stmt_ann data structure, but:
77 (1) We absolutely rely on this information being kept until
78 we get to rtl. Once we're done with lowering here, if we lose
79 the information there's no way to recover it!
81 (2) There are many more statements that *cannot* throw as
82 compared to those that can. We should be saving some amount
83 of space by only allocating memory for those that can throw. */
85 /* Add statement T in function IFUN to landing pad NUM. */
87 static void
88 add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
90 gcc_assert (num != 0);
92 if (!get_eh_throw_stmt_table (ifun))
93 set_eh_throw_stmt_table (ifun, hash_map<gimple, int>::create_ggc (31));
95 gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num));
98 /* Add statement T in the current function (cfun) to EH landing pad NUM. */
100 void
101 add_stmt_to_eh_lp (gimple t, int num)
103 add_stmt_to_eh_lp_fn (cfun, t, num);
106 /* Add statement T to the single EH landing pad in REGION. */
108 static void
109 record_stmt_eh_region (eh_region region, gimple t)
111 if (region == NULL)
112 return;
113 if (region->type == ERT_MUST_NOT_THROW)
114 add_stmt_to_eh_lp_fn (cfun, t, -region->index);
115 else
117 eh_landing_pad lp = region->landing_pads;
118 if (lp == NULL)
119 lp = gen_eh_landing_pad (region);
120 else
121 gcc_assert (lp->next_lp == NULL);
122 add_stmt_to_eh_lp_fn (cfun, t, lp->index);
127 /* Remove statement T in function IFUN from its EH landing pad. */
129 bool
130 remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
132 if (!get_eh_throw_stmt_table (ifun))
133 return false;
135 if (!get_eh_throw_stmt_table (ifun)->get (t))
136 return false;
138 get_eh_throw_stmt_table (ifun)->remove (t);
139 return true;
143 /* Remove statement T in the current function (cfun) from its
144 EH landing pad. */
146 bool
147 remove_stmt_from_eh_lp (gimple t)
149 return remove_stmt_from_eh_lp_fn (cfun, t);
152 /* Determine if statement T is inside an EH region in function IFUN.
153 Positive numbers indicate a landing pad index; negative numbers
154 indicate a MUST_NOT_THROW region index; zero indicates that the
155 statement is not recorded in the region table. */
157 int
158 lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
160 if (ifun->eh->throw_stmt_table == NULL)
161 return 0;
163 int *lp_nr = ifun->eh->throw_stmt_table->get (t);
164 return lp_nr ? *lp_nr : 0;
167 /* Likewise, but always use the current function. */
169 int
170 lookup_stmt_eh_lp (gimple t)
172 /* We can get called from initialized data when -fnon-call-exceptions
173 is on; prevent crash. */
174 if (!cfun)
175 return 0;
176 return lookup_stmt_eh_lp_fn (cfun, t);
179 /* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
180 nodes and LABEL_DECL nodes. We will use this during the second phase to
181 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
183 struct finally_tree_node
185 /* When storing a GIMPLE_TRY, we have to record a gimple. However,
186 when deciding whether a GOTO to a certain LABEL_DECL (which is a
187 tree) leaves the TRY block, it's necessary to record a tree in
188 this field. Thus a treemple is used. */
189 treemple child;
190 gimple parent;
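/* An illustrative sketch (the labels TF1, TF2, L1, L2 are only for
   exposition): for

	try {				TF1
	  L1:
	  try {				TF2
	    goto L1;			leaves TF2
	    goto L2;			stays inside TF2
	    L2: ;
	  } finally { ... }
	} finally { ... }

   the table records L1 -> TF1, L2 -> TF2 and TF2 -> TF1, which is what
   outside_finally_tree below walks to decide whether a jump escapes.  */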
193 /* Hashtable helpers. */
195 struct finally_tree_hasher : typed_free_remove <finally_tree_node>
197 typedef finally_tree_node value_type;
198 typedef finally_tree_node compare_type;
199 static inline hashval_t hash (const value_type *);
200 static inline bool equal (const value_type *, const compare_type *);
203 inline hashval_t
204 finally_tree_hasher::hash (const value_type *v)
206 return (intptr_t)v->child.t >> 4;
209 inline bool
210 finally_tree_hasher::equal (const value_type *v, const compare_type *c)
212 return v->child.t == c->child.t;
215 /* Note that this table is *not* marked GTY. It is short-lived. */
216 static hash_table<finally_tree_hasher> *finally_tree;
218 static void
219 record_in_finally_tree (treemple child, gimple parent)
221 struct finally_tree_node *n;
222 finally_tree_node **slot;
224 n = XNEW (struct finally_tree_node);
225 n->child = child;
226 n->parent = parent;
228 slot = finally_tree->find_slot (n, INSERT);
229 gcc_assert (!*slot);
230 *slot = n;
233 static void
234 collect_finally_tree (gimple stmt, gimple region);
236 /* Go through the gimple sequence. Works with collect_finally_tree to
237 record all GIMPLE_LABEL and GIMPLE_TRY statements. */
239 static void
240 collect_finally_tree_1 (gimple_seq seq, gimple region)
242 gimple_stmt_iterator gsi;
244 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
245 collect_finally_tree (gsi_stmt (gsi), region);
248 static void
249 collect_finally_tree (gimple stmt, gimple region)
251 treemple temp;
253 switch (gimple_code (stmt))
255 case GIMPLE_LABEL:
256 temp.t = gimple_label_label (stmt);
257 record_in_finally_tree (temp, region);
258 break;
260 case GIMPLE_TRY:
261 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
263 temp.g = stmt;
264 record_in_finally_tree (temp, region);
265 collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
266 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
268 else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
270 collect_finally_tree_1 (gimple_try_eval (stmt), region);
271 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
273 break;
275 case GIMPLE_CATCH:
276 collect_finally_tree_1 (gimple_catch_handler (stmt), region);
277 break;
279 case GIMPLE_EH_FILTER:
280 collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
281 break;
283 case GIMPLE_EH_ELSE:
284 collect_finally_tree_1 (gimple_eh_else_n_body (stmt), region);
285 collect_finally_tree_1 (gimple_eh_else_e_body (stmt), region);
286 break;
288 default:
289 /* A type, a decl, or some kind of statement that we're not
290 interested in. Don't walk them. */
291 break;
296 /* Use the finally tree to determine if a jump from START to TARGET
297 would leave the try_finally node that START lives in. */
299 static bool
300 outside_finally_tree (treemple start, gimple target)
302 struct finally_tree_node n, *p;
306 n.child = start;
307 p = finally_tree->find (&n);
308 if (!p)
309 return true;
310 start.g = p->parent;
312 while (start.g != target);
314 return false;
317 /* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY
318 nodes into a set of gotos, magic labels, and eh regions.
319 The eh region creation is straightforward, but frobbing all the gotos
320 and such into shape isn't. */
322 /* The sequence into which we record all EH stuff. This will be
323 placed at the end of the function when we're all done. */
324 static gimple_seq eh_seq;
326 /* Record whether an EH region contains something that can throw,
327 indexed by EH region number. */
328 static bitmap eh_region_may_contain_throw_map;
330 /* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
331 statements that are seen to escape this GIMPLE_TRY_FINALLY node.
332 The idea is to record a gimple statement for everything except for
333 the conditionals, which get their labels recorded. Since labels are
334 of type 'tree', we need this node to store both gimple and tree
335 objects. REPL_STMT is the sequence used to replace the goto/return
336 statement. CONT_STMT is used to store the statement that allows
337 the return/goto to jump to the original destination. */
339 struct goto_queue_node
341 treemple stmt;
342 location_t location;
343 gimple_seq repl_stmt;
344 gimple cont_stmt;
345 int index;
346 /* This is used when index >= 0 to indicate that stmt is a label (as
347 opposed to a goto stmt). */
348 int is_label;
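/* For illustration only (the label OUT is made up):

	try {
	  if (c) goto OUT;
	  return;
	} finally { f (); }
	OUT: ;

   Both the goto and the return escape the try body, so each gets a
   goto_queue_node.  REPL_STMT later receives the statements that branch to
   the finally code (plus any bookkeeping such as a finally_tmp assignment),
   while CONT_STMT keeps the branch/return that continues on to the original
   destination once the finally code has run.  */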
351 /* State of the world while lowering. */
353 struct leh_state
355 /* What's "current" while constructing the eh region tree. These
356 correspond to variables of the same name in cfun->eh, which we
357 don't have easy access to. */
358 eh_region cur_region;
360 /* What's "current" for the purposes of __builtin_eh_pointer. For
361 a CATCH, this is the associated TRY. For an EH_FILTER, this is
362 the associated ALLOWED_EXCEPTIONS, etc. */
363 eh_region ehp_region;
365 /* Processing of TRY_FINALLY requires a bit more state. This is
366 split out into a separate structure so that we don't have to
367 copy so much when processing other nodes. */
368 struct leh_tf_state *tf;
371 struct leh_tf_state
373 /* Pointer to the GIMPLE_TRY_FINALLY node under discussion. The
374 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain
375 this so that outside_finally_tree can reliably reference the tree used
376 in the collect_finally_tree data structures. */
377 gimple try_finally_expr;
378 gimple top_p;
380 /* While lowering, a top_p is usually expanded into multiple statements,
381 thus we need the following field to store them. */
382 gimple_seq top_p_seq;
384 /* The state outside this try_finally node. */
385 struct leh_state *outer;
387 /* The exception region created for it. */
388 eh_region region;
390 /* The goto queue. */
391 struct goto_queue_node *goto_queue;
392 size_t goto_queue_size;
393 size_t goto_queue_active;
395 /* Pointer map to help in searching goto_queue when it is large. */
396 hash_map<gimple, goto_queue_node *> *goto_queue_map;
398 /* The set of unique labels seen as entries in the goto queue. */
399 vec<tree> dest_array;
401 /* A label to be added at the end of the completed transformed
402 sequence. It will be set if may_fallthru was true *at one time*,
403 though subsequent transformations may have cleared that flag. */
404 tree fallthru_label;
406 /* True if it is possible to fall out the bottom of the try block.
407 Cleared if the fallthru is converted to a goto. */
408 bool may_fallthru;
410 /* True if any entry in goto_queue is a GIMPLE_RETURN. */
411 bool may_return;
413 /* True if the finally block can receive an exception edge.
414 Cleared if the exception case is handled by code duplication. */
415 bool may_throw;
418 static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);
420 /* Search for STMT in the goto queue. Return the replacement,
421 or null if the statement isn't in the queue. */
423 #define LARGE_GOTO_QUEUE 20
425 static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);
427 static gimple_seq
428 find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
430 unsigned int i;
432 if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
434 for (i = 0; i < tf->goto_queue_active; i++)
435 if (tf->goto_queue[i].stmt.g == stmt.g)
436 return tf->goto_queue[i].repl_stmt;
437 return NULL;
440 /* If we have a large number of entries in the goto_queue, create a
441 pointer map and use that for searching. */
443 if (!tf->goto_queue_map)
445 tf->goto_queue_map = new hash_map<gimple, goto_queue_node *>;
446 for (i = 0; i < tf->goto_queue_active; i++)
448 bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g,
449 &tf->goto_queue[i]);
450 gcc_assert (!existed);
454 goto_queue_node **slot = tf->goto_queue_map->get (stmt.g);
455 if (slot != NULL)
456 return ((*slot)->repl_stmt);
458 return NULL;
461 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
462 lowered GIMPLE_COND. If, by chance, the replacement is a simple goto,
463 then we can just splat it in, otherwise we add the new stmts immediately
464 after the GIMPLE_COND and redirect. */
466 static void
467 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
468 gimple_stmt_iterator *gsi)
470 tree label;
471 gimple_seq new_seq;
472 treemple temp;
473 location_t loc = gimple_location (gsi_stmt (*gsi));
475 temp.tp = tp;
476 new_seq = find_goto_replacement (tf, temp);
477 if (!new_seq)
478 return;
480 if (gimple_seq_singleton_p (new_seq)
481 && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
483 *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
484 return;
487 label = create_artificial_label (loc);
488 /* Set the new label for the GIMPLE_COND */
489 *tp = label;
491 gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
492 gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
495 /* The real work of replace_goto_queue. Returns with TSI updated to
496 point to the next statement. */
498 static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);
500 static void
501 replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
502 gimple_stmt_iterator *gsi)
504 gimple_seq seq;
505 treemple temp;
506 temp.g = NULL;
508 switch (gimple_code (stmt))
510 case GIMPLE_GOTO:
511 case GIMPLE_RETURN:
512 temp.g = stmt;
513 seq = find_goto_replacement (tf, temp);
514 if (seq)
516 gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
517 gsi_remove (gsi, false);
518 return;
520 break;
522 case GIMPLE_COND:
523 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
524 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
525 break;
527 case GIMPLE_TRY:
528 replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
529 replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
530 break;
531 case GIMPLE_CATCH:
532 replace_goto_queue_stmt_list (gimple_catch_handler_ptr (stmt), tf);
533 break;
534 case GIMPLE_EH_FILTER:
535 replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
536 break;
537 case GIMPLE_EH_ELSE:
538 replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (stmt), tf);
539 replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (stmt), tf);
540 break;
542 default:
543 /* These won't have gotos in them. */
544 break;
547 gsi_next (gsi);
550 /* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */
552 static void
553 replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
555 gimple_stmt_iterator gsi = gsi_start (*seq);
557 while (!gsi_end_p (gsi))
558 replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
561 /* Replace all goto queue members. */
563 static void
564 replace_goto_queue (struct leh_tf_state *tf)
566 if (tf->goto_queue_active == 0)
567 return;
568 replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
569 replace_goto_queue_stmt_list (&eh_seq, tf);
572 /* Add a new record to the goto queue contained in TF. NEW_STMT is the
573 data to be added, IS_LABEL indicates whether NEW_STMT is a label or
574 a gimple return. */
576 static void
577 record_in_goto_queue (struct leh_tf_state *tf,
578 treemple new_stmt,
579 int index,
580 bool is_label,
581 location_t location)
583 size_t active, size;
584 struct goto_queue_node *q;
586 gcc_assert (!tf->goto_queue_map);
588 active = tf->goto_queue_active;
589 size = tf->goto_queue_size;
590 if (active >= size)
592 size = (size ? size * 2 : 32);
593 tf->goto_queue_size = size;
594 tf->goto_queue
595 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
598 q = &tf->goto_queue[active];
599 tf->goto_queue_active = active + 1;
601 memset (q, 0, sizeof (*q));
602 q->stmt = new_stmt;
603 q->index = index;
604 q->location = location;
605 q->is_label = is_label;
608 /* Record the LABEL label in the goto queue contained in TF.
609 TF is not null. */
611 static void
612 record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
613 location_t location)
615 int index;
616 treemple temp, new_stmt;
618 if (!label)
619 return;
621 /* Computed and non-local gotos do not get processed. Given
622 their nature we can neither tell whether we've escaped the
623 finally block nor redirect them if we knew. */
624 if (TREE_CODE (label) != LABEL_DECL)
625 return;
627 /* No need to record gotos that don't leave the try block. */
628 temp.t = label;
629 if (!outside_finally_tree (temp, tf->try_finally_expr))
630 return;
632 if (! tf->dest_array.exists ())
634 tf->dest_array.create (10);
635 tf->dest_array.quick_push (label);
636 index = 0;
638 else
640 int n = tf->dest_array.length ();
641 for (index = 0; index < n; ++index)
642 if (tf->dest_array[index] == label)
643 break;
644 if (index == n)
645 tf->dest_array.safe_push (label);
648 /* In the case of a GOTO we want to record the destination label,
649 since with a GIMPLE_COND we have an easy access to the then/else
650 labels. */
651 new_stmt = stmt;
652 record_in_goto_queue (tf, new_stmt, index, true, location);
655 /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
656 node, and if so record that fact in the goto queue associated with that
657 try_finally node. */
659 static void
660 maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
662 struct leh_tf_state *tf = state->tf;
663 treemple new_stmt;
665 if (!tf)
666 return;
668 switch (gimple_code (stmt))
670 case GIMPLE_COND:
671 new_stmt.tp = gimple_op_ptr (stmt, 2);
672 record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt),
673 EXPR_LOCATION (*new_stmt.tp));
674 new_stmt.tp = gimple_op_ptr (stmt, 3);
675 record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt),
676 EXPR_LOCATION (*new_stmt.tp));
677 break;
678 case GIMPLE_GOTO:
679 new_stmt.g = stmt;
680 record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
681 gimple_location (stmt));
682 break;
684 case GIMPLE_RETURN:
685 tf->may_return = true;
686 new_stmt.g = stmt;
687 record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
688 break;
690 default:
691 gcc_unreachable ();
696 #ifdef ENABLE_CHECKING
697 /* We do not process GIMPLE_SWITCHes for now. As long as the original source
698 was in fact structured, and we've not yet done jump threading, then none
699 of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */
701 static void
702 verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
704 struct leh_tf_state *tf = state->tf;
705 size_t i, n;
707 if (!tf)
708 return;
710 n = gimple_switch_num_labels (switch_expr);
712 for (i = 0; i < n; ++i)
714 treemple temp;
715 tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
716 temp.t = lab;
717 gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
720 #else
721 #define verify_norecord_switch_expr(state, switch_expr)
722 #endif
724 /* Redirect a RETURN_EXPR pointed to by Q to FINLAB. If MOD is
725 non-null, insert it before the new branch. */
727 static void
728 do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
730 gimple x;
732 /* In the case of a return, the queue node must be a gimple statement. */
733 gcc_assert (!q->is_label);
735 /* Note that the return value may have already been computed, e.g.,
737 int x;
738 int foo (void)
740 x = 0;
741 try {
742 return x;
743 } finally {
744 x++;
748 should return 0, not 1. We don't have to do anything to make
749 this happen because the return value has been placed in the
750 RESULT_DECL already. */
752 q->cont_stmt = q->stmt.g;
754 if (mod)
755 gimple_seq_add_seq (&q->repl_stmt, mod);
757 x = gimple_build_goto (finlab);
758 gimple_set_location (x, q->location);
759 gimple_seq_add_stmt (&q->repl_stmt, x);
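/* For example, with FINLAB being whatever label the caller supplies, a
   queued

	return;

   ends up with repl_stmt = { <MOD statements, if any>; goto FINLAB; },
   while the original GIMPLE_RETURN is kept in cont_stmt so that whichever
   lowering strategy is chosen can re-emit it after the finally code.  */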
762 /* Similar, but easier, for GIMPLE_GOTO. */
764 static void
765 do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
766 struct leh_tf_state *tf)
768 gimple x;
770 gcc_assert (q->is_label);
772 q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);
774 if (mod)
775 gimple_seq_add_seq (&q->repl_stmt, mod);
777 x = gimple_build_goto (finlab);
778 gimple_set_location (x, q->location);
779 gimple_seq_add_stmt (&q->repl_stmt, x);
782 /* Emit a standard landing pad sequence into SEQ for REGION. */
784 static void
785 emit_post_landing_pad (gimple_seq *seq, eh_region region)
787 eh_landing_pad lp = region->landing_pads;
788 gimple x;
790 if (lp == NULL)
791 lp = gen_eh_landing_pad (region);
793 lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
794 EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;
796 x = gimple_build_label (lp->post_landing_pad);
797 gimple_seq_add_stmt (seq, x);
800 /* Emit a RESX statement into SEQ for REGION. */
802 static void
803 emit_resx (gimple_seq *seq, eh_region region)
805 gimple x = gimple_build_resx (region->index);
806 gimple_seq_add_stmt (seq, x);
807 if (region->outer)
808 record_stmt_eh_region (region->outer, x);
811 /* Emit an EH_DISPATCH statement into SEQ for REGION. */
813 static void
814 emit_eh_dispatch (gimple_seq *seq, eh_region region)
816 gimple x = gimple_build_eh_dispatch (region->index);
817 gimple_seq_add_stmt (seq, x);
820 /* Note that the current EH region may contain a throw, or a
821 call to a function which itself may contain a throw. */
823 static void
824 note_eh_region_may_contain_throw (eh_region region)
826 while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
828 if (region->type == ERT_MUST_NOT_THROW)
829 break;
830 region = region->outer;
831 if (region == NULL)
832 break;
836 /* Check if REGION has been marked as containing a throw. If REGION is
837 NULL, this predicate is false. */
839 static inline bool
840 eh_region_may_contain_throw (eh_region r)
842 return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
845 /* We want to transform
846 try { body; } catch { stuff; }
847 to
848 normal_sequence:
849 body;
850 over:
851 eh_sequence:
852 landing_pad:
853 stuff;
854 goto over;
856 TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad
857 should be placed before the second operand, or NULL. OVER is
858 an existing label that should be put at the exit, or NULL. */
860 static gimple_seq
861 frob_into_branch_around (gimple tp, eh_region region, tree over)
863 gimple x;
864 gimple_seq cleanup, result;
865 location_t loc = gimple_location (tp);
867 cleanup = gimple_try_cleanup (tp);
868 result = gimple_try_eval (tp);
870 if (region)
871 emit_post_landing_pad (&eh_seq, region);
873 if (gimple_seq_may_fallthru (cleanup))
875 if (!over)
876 over = create_artificial_label (loc);
877 x = gimple_build_goto (over);
878 gimple_set_location (x, loc);
879 gimple_seq_add_stmt (&cleanup, x);
881 gimple_seq_add_seq (&eh_seq, cleanup);
883 if (over)
885 x = gimple_build_label (over);
886 gimple_seq_add_stmt (&result, x);
888 return result;
891 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
892 Make sure to record all new labels found. */
894 static gimple_seq
895 lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
896 location_t loc)
898 gimple region = NULL;
899 gimple_seq new_seq;
900 gimple_stmt_iterator gsi;
902 new_seq = copy_gimple_seq_and_replace_locals (seq);
904 for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
906 gimple stmt = gsi_stmt (gsi);
907 if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
909 tree block = gimple_block (stmt);
910 gimple_set_location (stmt, loc);
911 gimple_set_block (stmt, block);
915 if (outer_state->tf)
916 region = outer_state->tf->try_finally_expr;
917 collect_finally_tree_1 (new_seq, region);
919 return new_seq;
922 /* A subroutine of lower_try_finally. Create a fallthru label for
923 the given try_finally state. The only tricky bit here is that
924 we have to make sure to record the label in our outer context. */
926 static tree
927 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
929 tree label = tf->fallthru_label;
930 treemple temp;
932 if (!label)
934 label = create_artificial_label (gimple_location (tf->try_finally_expr));
935 tf->fallthru_label = label;
936 if (tf->outer->tf)
938 temp.t = label;
939 record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
942 return label;
945 /* A subroutine of lower_try_finally. If FINALLY consists of a
946 GIMPLE_EH_ELSE node, return it. */
948 static inline gimple
949 get_eh_else (gimple_seq finally)
951 gimple x = gimple_seq_first_stmt (finally);
952 if (gimple_code (x) == GIMPLE_EH_ELSE)
954 gcc_assert (gimple_seq_singleton_p (finally));
955 return x;
957 return NULL;
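/* A sketch of such a cleanup (N and E stand for arbitrary sequences):

	try { body; } finally { EH_ELSE { N } { E } }

   N is used for the normal exits (fallthru, goto, return) and E only for
   the exception path, which lets the lowering routines below pick one or
   the other instead of duplicating a combined cleanup.  */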
960 /* A subroutine of lower_try_finally. If the eh_protect_cleanup_actions
961 langhook returns non-null, then the language requires that the exception
962 path out of a try_finally be treated specially. To wit: the code within
963 the finally block may not itself throw an exception. We have two choices
964 here. First we can duplicate the finally block and wrap it in a
965 must_not_throw region. Second, we can generate code like
967 try {
968 finally_block;
969 } catch {
970 if (fintmp == eh_edge)
971 protect_cleanup_actions;
974 where "fintmp" is the temporary used in the switch statement generation
975 alternative considered below. For the nonce, we always choose the first
976 option.
978 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
980 static void
981 honor_protect_cleanup_actions (struct leh_state *outer_state,
982 struct leh_state *this_state,
983 struct leh_tf_state *tf)
985 tree protect_cleanup_actions;
986 gimple_stmt_iterator gsi;
987 bool finally_may_fallthru;
988 gimple_seq finally;
989 gimple x, eh_else;
991 /* First check for nothing to do. */
992 if (lang_hooks.eh_protect_cleanup_actions == NULL)
993 return;
994 protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
995 if (protect_cleanup_actions == NULL)
996 return;
998 finally = gimple_try_cleanup (tf->top_p);
999 eh_else = get_eh_else (finally);
1001 /* Duplicate the FINALLY block. Only need to do this for try-finally,
1002 and not for cleanups. If we've got an EH_ELSE, extract it now. */
1003 if (eh_else)
1005 finally = gimple_eh_else_e_body (eh_else);
1006 gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
1008 else if (this_state)
1009 finally = lower_try_finally_dup_block (finally, outer_state,
1010 gimple_location (tf->try_finally_expr));
1011 finally_may_fallthru = gimple_seq_may_fallthru (finally);
1013 /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
1014 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
1015 to be in an enclosing scope, but needs to be implemented at this level
1016 to avoid a nesting violation (see wrap_temporary_cleanups in
1017 cp/decl.c). Since it's logically at an outer level, we should call
1018 terminate before we get to it, so strip it away before adding the
1019 MUST_NOT_THROW filter. */
1020 gsi = gsi_start (finally);
1021 x = gsi_stmt (gsi);
1022 if (gimple_code (x) == GIMPLE_TRY
1023 && gimple_try_kind (x) == GIMPLE_TRY_CATCH
1024 && gimple_try_catch_is_cleanup (x))
1026 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
1027 gsi_remove (&gsi, false);
1030 /* Wrap the block with protect_cleanup_actions as the action. */
1031 x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
1032 x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
1033 GIMPLE_TRY_CATCH);
1034 finally = lower_eh_must_not_throw (outer_state, x);
1036 /* Drop all of this into the exception sequence. */
1037 emit_post_landing_pad (&eh_seq, tf->region);
1038 gimple_seq_add_seq (&eh_seq, finally);
1039 if (finally_may_fallthru)
1040 emit_resx (&eh_seq, tf->region);
1042 /* Having now been handled, EH isn't to be considered with
1043 the rest of the outgoing edges. */
1044 tf->may_throw = false;
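/* Roughly, when the eh_protect_cleanup_actions hook returns e.g. a call to
   terminate, the exception path of

	try { body; } finally { f (); }

   is rewritten here as

	try { f (); } catch MUST_NOT_THROW (terminate)

   emitted after the post landing pad, followed by a RESX if f () can fall
   through.  The normal-path copy of f () is left for the lowering
   strategies below.  */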
1047 /* A subroutine of lower_try_finally. We have determined that there is
1048 no fallthru edge out of the finally block. This means that there is
1049 no outgoing edge corresponding to any incoming edge. Restructure the
1050 try_finally node for this special case. */
1052 static void
1053 lower_try_finally_nofallthru (struct leh_state *state,
1054 struct leh_tf_state *tf)
1056 tree lab;
1057 gimple x, eh_else;
1058 gimple_seq finally;
1059 struct goto_queue_node *q, *qe;
1061 lab = create_artificial_label (gimple_location (tf->try_finally_expr));
1063 /* We expect that tf->top_p is a GIMPLE_TRY. */
1064 finally = gimple_try_cleanup (tf->top_p);
1065 tf->top_p_seq = gimple_try_eval (tf->top_p);
1067 x = gimple_build_label (lab);
1068 gimple_seq_add_stmt (&tf->top_p_seq, x);
1070 q = tf->goto_queue;
1071 qe = q + tf->goto_queue_active;
1072 for (; q < qe; ++q)
1073 if (q->index < 0)
1074 do_return_redirection (q, lab, NULL);
1075 else
1076 do_goto_redirection (q, lab, NULL, tf);
1078 replace_goto_queue (tf);
1080 /* Emit the finally block into the stream. Lower EH_ELSE at this time. */
1081 eh_else = get_eh_else (finally);
1082 if (eh_else)
1084 finally = gimple_eh_else_n_body (eh_else);
1085 lower_eh_constructs_1 (state, &finally);
1086 gimple_seq_add_seq (&tf->top_p_seq, finally);
1088 if (tf->may_throw)
1090 finally = gimple_eh_else_e_body (eh_else);
1091 lower_eh_constructs_1 (state, &finally);
1093 emit_post_landing_pad (&eh_seq, tf->region);
1094 gimple_seq_add_seq (&eh_seq, finally);
1097 else
1099 lower_eh_constructs_1 (state, &finally);
1100 gimple_seq_add_seq (&tf->top_p_seq, finally);
1102 if (tf->may_throw)
1104 emit_post_landing_pad (&eh_seq, tf->region);
1106 x = gimple_build_goto (lab);
1107 gimple_set_location (x, gimple_location (tf->try_finally_expr));
1108 gimple_seq_add_stmt (&eh_seq, x);
1113 /* A subroutine of lower_try_finally. We have determined that there is
1114 exactly one destination of the finally block. Restructure the
1115 try_finally node for this special case. */
1117 static void
1118 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
1120 struct goto_queue_node *q, *qe;
1121 gimple x;
1122 gimple_seq finally;
1123 gimple_stmt_iterator gsi;
1124 tree finally_label;
1125 location_t loc = gimple_location (tf->try_finally_expr);
1127 finally = gimple_try_cleanup (tf->top_p);
1128 tf->top_p_seq = gimple_try_eval (tf->top_p);
1130 /* Since there's only one destination, and the destination edge can only
1131 either be EH or non-EH, that implies that all of our incoming edges
1132 are of the same type. Therefore we can lower EH_ELSE immediately. */
1133 x = get_eh_else (finally);
1134 if (x)
1136 if (tf->may_throw)
1137 finally = gimple_eh_else_e_body (x);
1138 else
1139 finally = gimple_eh_else_n_body (x);
1142 lower_eh_constructs_1 (state, &finally);
1144 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1146 gimple stmt = gsi_stmt (gsi);
1147 if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
1149 tree block = gimple_block (stmt);
1150 gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
1151 gimple_set_block (stmt, block);
1155 if (tf->may_throw)
1157 /* Only reachable via the exception edge. Add the given label to
1158 the head of the FINALLY block. Append a RESX at the end. */
1159 emit_post_landing_pad (&eh_seq, tf->region);
1160 gimple_seq_add_seq (&eh_seq, finally);
1161 emit_resx (&eh_seq, tf->region);
1162 return;
1165 if (tf->may_fallthru)
1167 /* Only reachable via the fallthru edge. Do nothing but let
1168 the two blocks run together; we'll fall out the bottom. */
1169 gimple_seq_add_seq (&tf->top_p_seq, finally);
1170 return;
1173 finally_label = create_artificial_label (loc);
1174 x = gimple_build_label (finally_label);
1175 gimple_seq_add_stmt (&tf->top_p_seq, x);
1177 gimple_seq_add_seq (&tf->top_p_seq, finally);
1179 q = tf->goto_queue;
1180 qe = q + tf->goto_queue_active;
1182 if (tf->may_return)
1184 /* Reachable by return expressions only. Redirect them. */
1185 for (; q < qe; ++q)
1186 do_return_redirection (q, finally_label, NULL);
1187 replace_goto_queue (tf);
1189 else
1191 /* Reachable by goto expressions only. Redirect them. */
1192 for (; q < qe; ++q)
1193 do_goto_redirection (q, finally_label, NULL, tf);
1194 replace_goto_queue (tf);
1196 if (tf->dest_array[0] == tf->fallthru_label)
1198 /* Reachable by goto to fallthru label only. Redirect it
1199 to the new label (already created, sadly), and do not
1200 emit the final branch out, or the fallthru label. */
1201 tf->fallthru_label = NULL;
1202 return;
1206 /* Place the original return/goto to the original destination
1207 immediately after the finally block. */
1208 x = tf->goto_queue[0].cont_stmt;
1209 gimple_seq_add_stmt (&tf->top_p_seq, x);
1210 maybe_record_in_goto_queue (state, x);
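/* For instance, when the finally block is reachable only via a return
   (FINALLY_LABEL below is artificial):

	try { ...; return; } finally { f (); }

   becomes roughly

	...;
	goto FINALLY_LABEL;
	FINALLY_LABEL:
	f ();
	return;

   where the trailing return is the saved cont_stmt re-emitted after the
   finally code.  */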
1213 /* A subroutine of lower_try_finally. There are multiple edges incoming
1214 and outgoing from the finally block. Implement this by duplicating the
1215 finally block for every destination. */
1217 static void
1218 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1220 gimple_seq finally;
1221 gimple_seq new_stmt;
1222 gimple_seq seq;
1223 gimple x, eh_else;
1224 tree tmp;
1225 location_t tf_loc = gimple_location (tf->try_finally_expr);
1227 finally = gimple_try_cleanup (tf->top_p);
1229 /* Notice EH_ELSE, and simplify some of the remaining code
1230 by considering FINALLY to be the normal return path only. */
1231 eh_else = get_eh_else (finally);
1232 if (eh_else)
1233 finally = gimple_eh_else_n_body (eh_else);
1235 tf->top_p_seq = gimple_try_eval (tf->top_p);
1236 new_stmt = NULL;
1238 if (tf->may_fallthru)
1240 seq = lower_try_finally_dup_block (finally, state, tf_loc);
1241 lower_eh_constructs_1 (state, &seq);
1242 gimple_seq_add_seq (&new_stmt, seq);
1244 tmp = lower_try_finally_fallthru_label (tf);
1245 x = gimple_build_goto (tmp);
1246 gimple_set_location (x, tf_loc);
1247 gimple_seq_add_stmt (&new_stmt, x);
1250 if (tf->may_throw)
1252 /* We don't need to copy the EH path of EH_ELSE,
1253 since it is only emitted once. */
1254 if (eh_else)
1255 seq = gimple_eh_else_e_body (eh_else);
1256 else
1257 seq = lower_try_finally_dup_block (finally, state, tf_loc);
1258 lower_eh_constructs_1 (state, &seq);
1260 emit_post_landing_pad (&eh_seq, tf->region);
1261 gimple_seq_add_seq (&eh_seq, seq);
1262 emit_resx (&eh_seq, tf->region);
1265 if (tf->goto_queue)
1267 struct goto_queue_node *q, *qe;
1268 int return_index, index;
1269 struct labels_s
1271 struct goto_queue_node *q;
1272 tree label;
1273 } *labels;
1275 return_index = tf->dest_array.length ();
1276 labels = XCNEWVEC (struct labels_s, return_index + 1);
1278 q = tf->goto_queue;
1279 qe = q + tf->goto_queue_active;
1280 for (; q < qe; q++)
1282 index = q->index < 0 ? return_index : q->index;
1284 if (!labels[index].q)
1285 labels[index].q = q;
1288 for (index = 0; index < return_index + 1; index++)
1290 tree lab;
1292 q = labels[index].q;
1293 if (! q)
1294 continue;
1296 lab = labels[index].label
1297 = create_artificial_label (tf_loc);
1299 if (index == return_index)
1300 do_return_redirection (q, lab, NULL);
1301 else
1302 do_goto_redirection (q, lab, NULL, tf);
1304 x = gimple_build_label (lab);
1305 gimple_seq_add_stmt (&new_stmt, x);
1307 seq = lower_try_finally_dup_block (finally, state, q->location);
1308 lower_eh_constructs_1 (state, &seq);
1309 gimple_seq_add_seq (&new_stmt, seq);
1311 gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
1312 maybe_record_in_goto_queue (state, q->cont_stmt);
1315 for (q = tf->goto_queue; q < qe; q++)
1317 tree lab;
1319 index = q->index < 0 ? return_index : q->index;
1321 if (labels[index].q == q)
1322 continue;
1324 lab = labels[index].label;
1326 if (index == return_index)
1327 do_return_redirection (q, lab, NULL);
1328 else
1329 do_goto_redirection (q, lab, NULL, tf);
1332 replace_goto_queue (tf);
1333 free (labels);
1336 /* Need to link new stmts after running replace_goto_queue due
1337 to not wanting to process the same goto stmts twice. */
1338 gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
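/* As an example of the duplication strategy (labels are illustrative),
   with one fallthru exit and one return exit

	try { if (c) return; } finally { f (); }

   becomes roughly

	if (c) goto L_RET;
	f (); goto FALLTHRU;		copy for the fallthru edge
	L_RET: f (); return;		copy for the return edge

   plus, if the region may throw, one more copy on the EH sequence that
   ends in a RESX.  */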
1341 /* A subroutine of lower_try_finally. There are multiple edges incoming
1342 and outgoing from the finally block. Implement this by instrumenting
1343 each incoming edge and creating a switch statement at the end of the
1344 finally block that branches to the appropriate destination. */
1346 static void
1347 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1349 struct goto_queue_node *q, *qe;
1350 tree finally_tmp, finally_label;
1351 int return_index, eh_index, fallthru_index;
1352 int nlabels, ndests, j, last_case_index;
1353 tree last_case;
1354 vec<tree> case_label_vec;
1355 gimple_seq switch_body = NULL;
1356 gimple x, eh_else;
1357 tree tmp;
1358 gimple switch_stmt;
1359 gimple_seq finally;
1360 hash_map<tree, gimple> *cont_map = NULL;
1361 /* The location of the TRY_FINALLY stmt. */
1362 location_t tf_loc = gimple_location (tf->try_finally_expr);
1363 /* The location of the finally block. */
1364 location_t finally_loc;
1366 finally = gimple_try_cleanup (tf->top_p);
1367 eh_else = get_eh_else (finally);
1369 /* Mash the TRY block to the head of the chain. */
1370 tf->top_p_seq = gimple_try_eval (tf->top_p);
1372 /* The location of the finally is either the last stmt in the finally
1373 block or the location of the TRY_FINALLY itself. */
1374 x = gimple_seq_last_stmt (finally);
1375 finally_loc = x ? gimple_location (x) : tf_loc;
1377 /* Prepare for switch statement generation. */
1378 nlabels = tf->dest_array.length ();
1379 return_index = nlabels;
1380 eh_index = return_index + tf->may_return;
1381 fallthru_index = eh_index + (tf->may_throw && !eh_else);
1382 ndests = fallthru_index + tf->may_fallthru;
1384 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1385 finally_label = create_artificial_label (finally_loc);
1387 /* We use vec::quick_push on case_label_vec throughout this function,
1388 since we know the size in advance and allocate precisely as much
1389 space as needed. */
1390 case_label_vec.create (ndests);
1391 last_case = NULL;
1392 last_case_index = 0;
1394 /* Begin inserting code for getting to the finally block. Things
1395 are done in this order to correspond to the sequence in which the
1396 code is laid out. */
1398 if (tf->may_fallthru)
1400 x = gimple_build_assign (finally_tmp,
1401 build_int_cst (integer_type_node,
1402 fallthru_index));
1403 gimple_seq_add_stmt (&tf->top_p_seq, x);
1405 tmp = build_int_cst (integer_type_node, fallthru_index);
1406 last_case = build_case_label (tmp, NULL,
1407 create_artificial_label (tf_loc));
1408 case_label_vec.quick_push (last_case);
1409 last_case_index++;
1411 x = gimple_build_label (CASE_LABEL (last_case));
1412 gimple_seq_add_stmt (&switch_body, x);
1414 tmp = lower_try_finally_fallthru_label (tf);
1415 x = gimple_build_goto (tmp);
1416 gimple_set_location (x, tf_loc);
1417 gimple_seq_add_stmt (&switch_body, x);
1420 /* For EH_ELSE, emit the exception path (plus resx) now, then
1421 subsequently we only need consider the normal path. */
1422 if (eh_else)
1424 if (tf->may_throw)
1426 finally = gimple_eh_else_e_body (eh_else);
1427 lower_eh_constructs_1 (state, &finally);
1429 emit_post_landing_pad (&eh_seq, tf->region);
1430 gimple_seq_add_seq (&eh_seq, finally);
1431 emit_resx (&eh_seq, tf->region);
1434 finally = gimple_eh_else_n_body (eh_else);
1436 else if (tf->may_throw)
1438 emit_post_landing_pad (&eh_seq, tf->region);
1440 x = gimple_build_assign (finally_tmp,
1441 build_int_cst (integer_type_node, eh_index));
1442 gimple_seq_add_stmt (&eh_seq, x);
1444 x = gimple_build_goto (finally_label);
1445 gimple_set_location (x, tf_loc);
1446 gimple_seq_add_stmt (&eh_seq, x);
1448 tmp = build_int_cst (integer_type_node, eh_index);
1449 last_case = build_case_label (tmp, NULL,
1450 create_artificial_label (tf_loc));
1451 case_label_vec.quick_push (last_case);
1452 last_case_index++;
1454 x = gimple_build_label (CASE_LABEL (last_case));
1455 gimple_seq_add_stmt (&eh_seq, x);
1456 emit_resx (&eh_seq, tf->region);
1459 x = gimple_build_label (finally_label);
1460 gimple_seq_add_stmt (&tf->top_p_seq, x);
1462 lower_eh_constructs_1 (state, &finally);
1463 gimple_seq_add_seq (&tf->top_p_seq, finally);
1465 /* Redirect each incoming goto edge. */
1466 q = tf->goto_queue;
1467 qe = q + tf->goto_queue_active;
1468 j = last_case_index + tf->may_return;
1469 /* Prepare the assignments to finally_tmp that are executed upon the
1470 entrance through a particular edge. */
1471 for (; q < qe; ++q)
1473 gimple_seq mod = NULL;
1474 int switch_id;
1475 unsigned int case_index;
1477 if (q->index < 0)
1479 x = gimple_build_assign (finally_tmp,
1480 build_int_cst (integer_type_node,
1481 return_index));
1482 gimple_seq_add_stmt (&mod, x);
1483 do_return_redirection (q, finally_label, mod);
1484 switch_id = return_index;
1486 else
1488 x = gimple_build_assign (finally_tmp,
1489 build_int_cst (integer_type_node, q->index));
1490 gimple_seq_add_stmt (&mod, x);
1491 do_goto_redirection (q, finally_label, mod, tf);
1492 switch_id = q->index;
1495 case_index = j + q->index;
1496 if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
1498 tree case_lab;
1499 tmp = build_int_cst (integer_type_node, switch_id);
1500 case_lab = build_case_label (tmp, NULL,
1501 create_artificial_label (tf_loc));
1502 /* We store the cont_stmt in the pointer map, so that we can recover
1503 it in the loop below. */
1504 if (!cont_map)
1505 cont_map = new hash_map<tree, gimple>;
1506 cont_map->put (case_lab, q->cont_stmt);
1507 case_label_vec.quick_push (case_lab);
1510 for (j = last_case_index; j < last_case_index + nlabels; j++)
1512 gimple cont_stmt;
1514 last_case = case_label_vec[j];
1516 gcc_assert (last_case);
1517 gcc_assert (cont_map);
1519 cont_stmt = *cont_map->get (last_case);
1521 x = gimple_build_label (CASE_LABEL (last_case));
1522 gimple_seq_add_stmt (&switch_body, x);
1523 gimple_seq_add_stmt (&switch_body, cont_stmt);
1524 maybe_record_in_goto_queue (state, cont_stmt);
1526 if (cont_map)
1527 delete cont_map;
1529 replace_goto_queue (tf);
1531 /* Make sure that the last case is the default label, as one is required.
1532 Then sort the labels, which is also required in GIMPLE. */
1533 CASE_LOW (last_case) = NULL;
1534 tree tem = case_label_vec.pop ();
1535 gcc_assert (tem == last_case);
1536 sort_case_labels (case_label_vec);
1538 /* Build the switch statement, setting last_case to be the default
1539 label. */
1540 switch_stmt = gimple_build_switch (finally_tmp, last_case,
1541 case_label_vec);
1542 gimple_set_location (switch_stmt, finally_loc);
1544 /* Need to link SWITCH_STMT after running replace_goto_queue
1545 due to not wanting to process the same goto stmts twice. */
1546 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
1547 gimple_seq_add_seq (&tf->top_p_seq, switch_body);
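/* A sketch of the result for a finally with a fallthru exit and a return
   exit (indices and labels are illustrative):

	<body>;
	finally_tmp = 1;		fallthru edge falls into FINALLY
	FINALLY:
	f ();
	switch (finally_tmp)
	  {
	  case 0: return;		each return was redirected here via
					finally_tmp = 0; goto FINALLY;
	  default: goto FALLTHRU;
	  }

   An EH entry, if present, gets its own index assigned on the eh_seq and a
   case that ends in a RESX.  */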
1550 /* Decide whether or not we are going to duplicate the finally block.
1551 There are several considerations.
1553 First, if this is Java, then the finally block contains code
1554 written by the user. It has line numbers associated with it,
1555 so duplicating the block means it's difficult to set a breakpoint.
1556 Since controlling code generation via -g is verboten, we simply
1557 never duplicate code without optimization.
1559 Second, we'd like to prevent egregious code growth. One way to
1560 do this is to estimate the size of the finally block, multiply
1561 that by the number of copies we'd need to make, and compare against
1562 the estimate of the size of the switch machinery we'd have to add. */
1564 static bool
1565 decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
1567 int f_estimate, sw_estimate;
1568 gimple eh_else;
1570 /* If there's an EH_ELSE involved, the exception path is separate
1571 and really doesn't come into play for this computation. */
1572 eh_else = get_eh_else (finally);
1573 if (eh_else)
1575 ndests -= may_throw;
1576 finally = gimple_eh_else_n_body (eh_else);
1579 if (!optimize)
1581 gimple_stmt_iterator gsi;
1583 if (ndests == 1)
1584 return true;
1586 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1588 gimple stmt = gsi_stmt (gsi);
1589 if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
1590 return false;
1592 return true;
1595 /* Finally estimate N times, plus N gotos. */
1596 f_estimate = count_insns_seq (finally, &eni_size_weights);
1597 f_estimate = (f_estimate + 1) * ndests;
1599 /* Switch statement (cost 10), N variable assignments, N gotos. */
1600 sw_estimate = 10 + 2 * ndests;
1602 /* Optimize for size clearly wants our best guess. */
1603 if (optimize_function_for_size_p (cfun))
1604 return f_estimate < sw_estimate;
1606 /* ??? These numbers are completely made up so far. */
1607 if (optimize > 1)
1608 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1609 else
1610 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
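/* A worked example of the estimate (numbers purely illustrative): a finally
   block counted as 4 insns with 3 destinations gives
   f_estimate = (4 + 1) * 3 = 15 and sw_estimate = 10 + 2 * 3 = 16, so when
   optimizing for size we copy (15 < 16), and at -O2 we also copy, since
   15 < 100.  */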
1613 /* REG is the enclosing region for a possible cleanup region, or the region
1614 itself. Returns TRUE if such a region would be unreachable.
1616 Cleanup regions within a must-not-throw region aren't actually reachable
1617 even if there are throwing stmts within them, because the personality
1618 routine will call terminate before unwinding. */
1620 static bool
1621 cleanup_is_dead_in (eh_region reg)
1623 while (reg && reg->type == ERT_CLEANUP)
1624 reg = reg->outer;
1625 return (reg && reg->type == ERT_MUST_NOT_THROW);
1628 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY nodes
1629 to a sequence of labels and blocks, plus the exception region trees
1630 that record all the magic. This is complicated by the need to
1631 arrange for the FINALLY block to be executed on all exits. */
1633 static gimple_seq
1634 lower_try_finally (struct leh_state *state, gimple tp)
1636 struct leh_tf_state this_tf;
1637 struct leh_state this_state;
1638 int ndests;
1639 gimple_seq old_eh_seq;
1641 /* Process the try block. */
1643 memset (&this_tf, 0, sizeof (this_tf));
1644 this_tf.try_finally_expr = tp;
1645 this_tf.top_p = tp;
1646 this_tf.outer = state;
1647 if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
1649 this_tf.region = gen_eh_region_cleanup (state->cur_region);
1650 this_state.cur_region = this_tf.region;
1652 else
1654 this_tf.region = NULL;
1655 this_state.cur_region = state->cur_region;
1658 this_state.ehp_region = state->ehp_region;
1659 this_state.tf = &this_tf;
1661 old_eh_seq = eh_seq;
1662 eh_seq = NULL;
1664 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1666 /* Determine if the try block is escaped through the bottom. */
1667 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1669 /* Determine if any exceptions are possible within the try block. */
1670 if (this_tf.region)
1671 this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
1672 if (this_tf.may_throw)
1673 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1675 /* Determine how many edges (still) reach the finally block. Or rather,
1676 how many destinations are reached by the finally block. Use this to
1677 determine how we process the finally block itself. */
1679 ndests = this_tf.dest_array.length ();
1680 ndests += this_tf.may_fallthru;
1681 ndests += this_tf.may_return;
1682 ndests += this_tf.may_throw;
1684 /* If the FINALLY block is not reachable, dike it out. */
1685 if (ndests == 0)
1687 gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
1688 gimple_try_set_cleanup (tp, NULL);
1690 /* If the finally block doesn't fall through, then any destination
1691 we might try to impose there isn't reached either. There may be
1692 some minor amount of cleanup and redirection still needed. */
1693 else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
1694 lower_try_finally_nofallthru (state, &this_tf);
1696 /* We can easily special-case redirection to a single destination. */
1697 else if (ndests == 1)
1698 lower_try_finally_onedest (state, &this_tf);
1699 else if (decide_copy_try_finally (ndests, this_tf.may_throw,
1700 gimple_try_cleanup (tp)))
1701 lower_try_finally_copy (state, &this_tf);
1702 else
1703 lower_try_finally_switch (state, &this_tf);
1705 /* If someone requested we add a label at the end of the transformed
1706 block, do so. */
1707 if (this_tf.fallthru_label)
1709 /* This must be reached only if ndests == 0. */
1710 gimple x = gimple_build_label (this_tf.fallthru_label);
1711 gimple_seq_add_stmt (&this_tf.top_p_seq, x);
1714 this_tf.dest_array.release ();
1715 free (this_tf.goto_queue);
1716 if (this_tf.goto_queue_map)
1717 delete this_tf.goto_queue_map;
1719 /* If there was an old (aka outer) eh_seq, append the current eh_seq.
1720 If there was no old eh_seq, then the append is trivially already done. */
1721 if (old_eh_seq)
1723 if (eh_seq == NULL)
1724 eh_seq = old_eh_seq;
1725 else
1727 gimple_seq new_eh_seq = eh_seq;
1728 eh_seq = old_eh_seq;
1729 gimple_seq_add_seq (&eh_seq, new_eh_seq);
1733 return this_tf.top_p_seq;
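/* To make the dispatch above concrete with a small example:

	try { return x; } finally { f (); }

   inside a region that can throw has ndests == 2 (return + throw) and a
   finally that falls through, so the copy-vs-switch heuristic decides; if
   the return were the only way out, ndests would be 1 and
   lower_try_finally_onedest would handle it.  */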
1736 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a
1737 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
1738 exception region trees that record all the magic. */
1740 static gimple_seq
1741 lower_catch (struct leh_state *state, gimple tp)
1743 eh_region try_region = NULL;
1744 struct leh_state this_state = *state;
1745 gimple_stmt_iterator gsi;
1746 tree out_label;
1747 gimple_seq new_seq, cleanup;
1748 gimple x;
1749 location_t try_catch_loc = gimple_location (tp);
1751 if (flag_exceptions)
1753 try_region = gen_eh_region_try (state->cur_region);
1754 this_state.cur_region = try_region;
1757 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1759 if (!eh_region_may_contain_throw (try_region))
1760 return gimple_try_eval (tp);
1762 new_seq = NULL;
1763 emit_eh_dispatch (&new_seq, try_region);
1764 emit_resx (&new_seq, try_region);
1766 this_state.cur_region = state->cur_region;
1767 this_state.ehp_region = try_region;
1769 out_label = NULL;
1770 cleanup = gimple_try_cleanup (tp);
1771 for (gsi = gsi_start (cleanup);
1772 !gsi_end_p (gsi);
1773 gsi_next (&gsi))
1775 eh_catch c;
1776 gimple gcatch;
1777 gimple_seq handler;
1779 gcatch = gsi_stmt (gsi);
1780 c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));
1782 handler = gimple_catch_handler (gcatch);
1783 lower_eh_constructs_1 (&this_state, &handler);
1785 c->label = create_artificial_label (UNKNOWN_LOCATION);
1786 x = gimple_build_label (c->label);
1787 gimple_seq_add_stmt (&new_seq, x);
1789 gimple_seq_add_seq (&new_seq, handler);
1791 if (gimple_seq_may_fallthru (new_seq))
1793 if (!out_label)
1794 out_label = create_artificial_label (try_catch_loc);
1796 x = gimple_build_goto (out_label);
1797 gimple_seq_add_stmt (&new_seq, x);
1799 if (!c->type_list)
1800 break;
1803 gimple_try_set_cleanup (tp, new_seq);
1805 return frob_into_branch_around (tp, try_region, out_label);
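/* The overall shape produced here, roughly (region and label names are
   illustrative):

	body;			throwing stmts recorded in region R
	OVER:

   and on eh_seq:

	POST_LANDING_PAD (R):
	eh_dispatch R;		branches to the matching handler
	resx R;			no handler matched: rethrow
	CATCH_LABEL_1: handler_1; goto OVER;
	CATCH_LABEL_2: handler_2; goto OVER;
	...  */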
1808 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a
1809 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
1810 region trees that record all the magic. */
1812 static gimple_seq
1813 lower_eh_filter (struct leh_state *state, gimple tp)
1815 struct leh_state this_state = *state;
1816 eh_region this_region = NULL;
1817 gimple inner, x;
1818 gimple_seq new_seq;
1820 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1822 if (flag_exceptions)
1824 this_region = gen_eh_region_allowed (state->cur_region,
1825 gimple_eh_filter_types (inner));
1826 this_state.cur_region = this_region;
1829 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1831 if (!eh_region_may_contain_throw (this_region))
1832 return gimple_try_eval (tp);
1834 new_seq = NULL;
1835 this_state.cur_region = state->cur_region;
1836 this_state.ehp_region = this_region;
1838 emit_eh_dispatch (&new_seq, this_region);
1839 emit_resx (&new_seq, this_region);
1841 this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
1842 x = gimple_build_label (this_region->u.allowed.label);
1843 gimple_seq_add_stmt (&new_seq, x);
1845 lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
1846 gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));
1848 gimple_try_set_cleanup (tp, new_seq);
1850 return frob_into_branch_around (tp, this_region, NULL);
1853 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with
1854 a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
1855 plus the exception region trees that record all the magic. */
1857 static gimple_seq
1858 lower_eh_must_not_throw (struct leh_state *state, gimple tp)
1860 struct leh_state this_state = *state;
1862 if (flag_exceptions)
1864 gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1865 eh_region this_region;
1867 this_region = gen_eh_region_must_not_throw (state->cur_region);
1868 this_region->u.must_not_throw.failure_decl
1869 = gimple_eh_must_not_throw_fndecl (inner);
1870 this_region->u.must_not_throw.failure_loc
1871 = LOCATION_LOCUS (gimple_location (tp));
1873 /* In order to get mangling applied to this decl, we must mark it
1874 used now. Otherwise, pass_ipa_free_lang_data won't think it
1875 needs to happen. */
1876 TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
1878 this_state.cur_region = this_region;
1881 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1883 return gimple_try_eval (tp);
1886 /* Implement a cleanup expression. This is similar to try-finally,
1887 except that we only execute the cleanup block for exception edges. */
1889 static gimple_seq
1890 lower_cleanup (struct leh_state *state, gimple tp)
1892 struct leh_state this_state = *state;
1893 eh_region this_region = NULL;
1894 struct leh_tf_state fake_tf;
1895 gimple_seq result;
1896 bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
1898 if (flag_exceptions && !cleanup_dead)
1900 this_region = gen_eh_region_cleanup (state->cur_region);
1901 this_state.cur_region = this_region;
1904 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1906 if (cleanup_dead || !eh_region_may_contain_throw (this_region))
1907 return gimple_try_eval (tp);
1909 /* Build enough of a try-finally state so that we can reuse
1910 honor_protect_cleanup_actions. */
1911 memset (&fake_tf, 0, sizeof (fake_tf));
1912 fake_tf.top_p = fake_tf.try_finally_expr = tp;
1913 fake_tf.outer = state;
1914 fake_tf.region = this_region;
1915 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1916 fake_tf.may_throw = true;
1918 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1920 if (fake_tf.may_throw)
1922 /* In this case honor_protect_cleanup_actions had nothing to do,
1923 and we should process this normally. */
1924 lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
1925 result = frob_into_branch_around (tp, this_region,
1926 fake_tf.fallthru_label);
1928 else
1930 /* In this case honor_protect_cleanup_actions did nearly all of
1931 the work. All we have left is to append the fallthru_label. */
1933 result = gimple_try_eval (tp);
1934 if (fake_tf.fallthru_label)
1936 gimple x = gimple_build_label (fake_tf.fallthru_label);
1937 gimple_seq_add_stmt (&result, x);
1940 return result;
1943 /* Main loop for lowering eh constructs. Also moves gsi to the next
1944 statement. */
1946 static void
1947 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1949 gimple_seq replace;
1950 gimple x;
1951 gimple stmt = gsi_stmt (*gsi);
1953 switch (gimple_code (stmt))
1955 case GIMPLE_CALL:
1957 tree fndecl = gimple_call_fndecl (stmt);
1958 tree rhs, lhs;
1960 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1961 switch (DECL_FUNCTION_CODE (fndecl))
1963 case BUILT_IN_EH_POINTER:
1964 /* The front end may have generated a call to
1965 __builtin_eh_pointer (0) within a catch region. Replace
1966 this zero argument with the current catch region number. */
1967 if (state->ehp_region)
1969 tree nr = build_int_cst (integer_type_node,
1970 state->ehp_region->index);
1971 gimple_call_set_arg (stmt, 0, nr);
1973 else
1975 /* The user has done something silly. Remove it. */
1976 rhs = null_pointer_node;
1977 goto do_replace;
1979 break;
1981 case BUILT_IN_EH_FILTER:
1982 /* ??? This should never appear, but since it's a builtin it
1983 is accessible to abuse by users. Just remove it and
1984 replace the use with the arbitrary value zero. */
1985 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
1986 do_replace:
1987 lhs = gimple_call_lhs (stmt);
1988 x = gimple_build_assign (lhs, rhs);
1989 gsi_insert_before (gsi, x, GSI_SAME_STMT);
1990 /* FALLTHRU */
1992 case BUILT_IN_EH_COPY_VALUES:
1993 /* Likewise this should not appear. Remove it. */
1994 gsi_remove (gsi, true);
1995 return;
1997 default:
1998 break;
2001 /* FALLTHRU */
2003 case GIMPLE_ASSIGN:
2004 /* If the stmt can throw use a new temporary for the assignment
2005 to a LHS. This makes sure the old value of the LHS is
2006 available on the EH edge. Only do so for statements that
2007 potentially fall through (no noreturn calls e.g.), otherwise
2008 this new assignment might create fake fallthru regions. */
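	 /* An illustrative sketch (made-up names, not from any testcase):

	      x = y / z;    // may throw

	    becomes

	      tmp = y / z;  // may throw
	      x = tmp;

	    so the EH edge out of the possibly-throwing statement still
	    sees the previous value of X.  */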
2009 if (stmt_could_throw_p (stmt)
2010 && gimple_has_lhs (stmt)
2011 && gimple_stmt_may_fallthru (stmt)
2012 && !tree_could_throw_p (gimple_get_lhs (stmt))
2013 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
2015 tree lhs = gimple_get_lhs (stmt);
2016 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
2017 gimple s = gimple_build_assign (lhs, tmp);
2018 gimple_set_location (s, gimple_location (stmt));
2019 gimple_set_block (s, gimple_block (stmt));
2020 gimple_set_lhs (stmt, tmp);
2021 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
2022 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
2023 DECL_GIMPLE_REG_P (tmp) = 1;
2024 gsi_insert_after (gsi, s, GSI_SAME_STMT);
2026 /* Look for things that can throw exceptions, and record them. */
2027 if (state->cur_region && stmt_could_throw_p (stmt))
2029 record_stmt_eh_region (state->cur_region, stmt);
2030 note_eh_region_may_contain_throw (state->cur_region);
2032 break;
2034 case GIMPLE_COND:
2035 case GIMPLE_GOTO:
2036 case GIMPLE_RETURN:
2037 maybe_record_in_goto_queue (state, stmt);
2038 break;
2040 case GIMPLE_SWITCH:
2041 verify_norecord_switch_expr (state, stmt);
2042 break;
2044 case GIMPLE_TRY:
2045 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
2046 replace = lower_try_finally (state, stmt);
2047 else
2049 x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
2050 if (!x)
2052 replace = gimple_try_eval (stmt);
2053 lower_eh_constructs_1 (state, &replace);
2055 else
2056 switch (gimple_code (x))
2058 case GIMPLE_CATCH:
2059 replace = lower_catch (state, stmt);
2060 break;
2061 case GIMPLE_EH_FILTER:
2062 replace = lower_eh_filter (state, stmt);
2063 break;
2064 case GIMPLE_EH_MUST_NOT_THROW:
2065 replace = lower_eh_must_not_throw (state, stmt);
2066 break;
2067 case GIMPLE_EH_ELSE:
2068 /* This code is only valid with GIMPLE_TRY_FINALLY. */
2069 gcc_unreachable ();
2070 default:
2071 replace = lower_cleanup (state, stmt);
2072 break;
2076 /* Remove the old stmt and insert the transformed sequence
2077 instead. */
2078 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
2079 gsi_remove (gsi, true);
2081 /* Return since we don't want gsi_next () */
2082 return;
2084 case GIMPLE_EH_ELSE:
2085 /* We should be eliminating this in lower_try_finally et al. */
2086 gcc_unreachable ();
2088 default:
2089 /* A type, a decl, or some kind of statement that we're not
2090 interested in. Don't walk them. */
2091 break;
2094 gsi_next (gsi);
2097 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
2099 static void
2100 lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
2102 gimple_stmt_iterator gsi;
2103 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
2104 lower_eh_constructs_2 (state, &gsi);
2107 namespace {
2109 const pass_data pass_data_lower_eh =
2111 GIMPLE_PASS, /* type */
2112 "eh", /* name */
2113 OPTGROUP_NONE, /* optinfo_flags */
2114 TV_TREE_EH, /* tv_id */
2115 PROP_gimple_lcf, /* properties_required */
2116 PROP_gimple_leh, /* properties_provided */
2117 0, /* properties_destroyed */
2118 0, /* todo_flags_start */
2119 0, /* todo_flags_finish */
2122 class pass_lower_eh : public gimple_opt_pass
2124 public:
2125 pass_lower_eh (gcc::context *ctxt)
2126 : gimple_opt_pass (pass_data_lower_eh, ctxt)
2129 /* opt_pass methods: */
2130 virtual unsigned int execute (function *);
2132 }; // class pass_lower_eh
2134 unsigned int
2135 pass_lower_eh::execute (function *fun)
2137 struct leh_state null_state;
2138 gimple_seq bodyp;
2140 bodyp = gimple_body (current_function_decl);
2141 if (bodyp == NULL)
2142 return 0;
2144 finally_tree = new hash_table<finally_tree_hasher> (31);
2145 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
2146 memset (&null_state, 0, sizeof (null_state));
2148 collect_finally_tree_1 (bodyp, NULL);
2149 lower_eh_constructs_1 (&null_state, &bodyp);
2150 gimple_set_body (current_function_decl, bodyp);
2152 /* We assume there's a return statement, or something, at the end of
2153 the function, and thus plopping the EH sequence afterward won't
2154 change anything. */
2155 gcc_assert (!gimple_seq_may_fallthru (bodyp));
2156 gimple_seq_add_seq (&bodyp, eh_seq);
2158 /* We assume that since BODYP already existed, adding EH_SEQ to it
2159 didn't change its value, and we don't have to re-set the function. */
2160 gcc_assert (bodyp == gimple_body (current_function_decl));
2162 delete finally_tree;
2163 finally_tree = NULL;
2164 BITMAP_FREE (eh_region_may_contain_throw_map);
2165 eh_seq = NULL;
2167 /* If this function needs a language specific EH personality routine
2168 and the frontend didn't already set one do so now. */
2169 if (function_needs_eh_personality (fun) == eh_personality_lang
2170 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2171 DECL_FUNCTION_PERSONALITY (current_function_decl)
2172 = lang_hooks.eh_personality ();
2174 return 0;
2177 } // anon namespace
2179 gimple_opt_pass *
2180 make_pass_lower_eh (gcc::context *ctxt)
2182 return new pass_lower_eh (ctxt);
2185 /* Create the multiple edges from an EH_DISPATCH statement to all of
2186 the possible handlers for its EH region. Return true if there's
2187 no fallthru edge; false if there is. */
2189 bool
2190 make_eh_dispatch_edges (gimple stmt)
2192 eh_region r;
2193 eh_catch c;
2194 basic_block src, dst;
2196 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2197 src = gimple_bb (stmt);
2199 switch (r->type)
2201 case ERT_TRY:
2202 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2204 dst = label_to_block (c->label);
2205 make_edge (src, dst, 0);
2207 /* A catch-all handler doesn't have a fallthru. */
2208 if (c->type_list == NULL)
2209 return false;
2211 break;
2213 case ERT_ALLOWED_EXCEPTIONS:
2214 dst = label_to_block (r->u.allowed.label);
2215 make_edge (src, dst, 0);
2216 break;
2218 default:
2219 gcc_unreachable ();
2222 return true;
2225 /* Create the single EH edge from STMT to its nearest landing pad,
2226 if there is such a landing pad within the current function. */
2228 void
2229 make_eh_edges (gimple stmt)
2231 basic_block src, dst;
2232 eh_landing_pad lp;
2233 int lp_nr;
2235 lp_nr = lookup_stmt_eh_lp (stmt);
2236 if (lp_nr <= 0)
2237 return;
2239 lp = get_eh_landing_pad_from_number (lp_nr);
2240 gcc_assert (lp != NULL);
2242 src = gimple_bb (stmt);
2243 dst = label_to_block (lp->post_landing_pad);
2244 make_edge (src, dst, EDGE_EH);
2247 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2248 do not actually perform the final edge redirection.
2250 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2251 we intend to change the destination EH region as well; this means
2252 EH_LANDING_PAD_NR must already be set on the destination block label.
2253 If false, we're being called from generic cfg manipulation code and we
2254 should preserve our place within the region tree. */
2256 static void
2257 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2259 eh_landing_pad old_lp, new_lp;
2260 basic_block old_bb;
2261 gimple throw_stmt;
2262 int old_lp_nr, new_lp_nr;
2263 tree old_label, new_label;
2264 edge_iterator ei;
2265 edge e;
2267 old_bb = edge_in->dest;
2268 old_label = gimple_block_label (old_bb);
2269 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2270 gcc_assert (old_lp_nr > 0);
2271 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2273 throw_stmt = last_stmt (edge_in->src);
2274 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2276 new_label = gimple_block_label (new_bb);
2278 /* Look for an existing region that might be using NEW_BB already. */
2279 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2280 if (new_lp_nr)
2282 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2283 gcc_assert (new_lp);
2285 /* Unless CHANGE_REGION is true, the new and old landing pad
2286 had better be associated with the same EH region. */
2287 gcc_assert (change_region || new_lp->region == old_lp->region);
2289 else
2291 new_lp = NULL;
2292 gcc_assert (!change_region);
2295 /* Notice when we redirect the last EH edge away from OLD_BB. */
2296 FOR_EACH_EDGE (e, ei, old_bb->preds)
2297 if (e != edge_in && (e->flags & EDGE_EH))
2298 break;
2300 if (new_lp)
2302 /* NEW_LP already exists. If there are still edges into OLD_LP,
2303 there's nothing to do with the EH tree. If there are no more
2304 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2305 If CHANGE_REGION is true, then our caller is expecting to remove
2306 the landing pad. */
2307 if (e == NULL && !change_region)
2308 remove_eh_landing_pad (old_lp);
2310 else
2312 /* No correct landing pad exists. If there are no more edges
2313 into OLD_LP, then we can simply re-use the existing landing pad.
2314 Otherwise, we have to create a new landing pad. */
2315 if (e == NULL)
2317 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2318 new_lp = old_lp;
2320 else
2321 new_lp = gen_eh_landing_pad (old_lp->region);
2322 new_lp->post_landing_pad = new_label;
2323 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2326 /* Maybe move the throwing statement to the new region. */
2327 if (old_lp != new_lp)
2329 remove_stmt_from_eh_lp (throw_stmt);
2330 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2334 /* Redirect EH edge E to NEW_BB. */
2336 edge
2337 redirect_eh_edge (edge edge_in, basic_block new_bb)
2339 redirect_eh_edge_1 (edge_in, new_bb, false);
2340 return ssa_redirect_edge (edge_in, new_bb);
2343 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2344 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2345 The actual edge update will happen in the caller. */
2347 void
2348 redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
2350 tree new_lab = gimple_block_label (new_bb);
2351 bool any_changed = false;
2352 basic_block old_bb;
2353 eh_region r;
2354 eh_catch c;
2356 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2357 switch (r->type)
2359 case ERT_TRY:
2360 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2362 old_bb = label_to_block (c->label);
2363 if (old_bb == e->dest)
2365 c->label = new_lab;
2366 any_changed = true;
2369 break;
2371 case ERT_ALLOWED_EXCEPTIONS:
2372 old_bb = label_to_block (r->u.allowed.label);
2373 gcc_assert (old_bb == e->dest);
2374 r->u.allowed.label = new_lab;
2375 any_changed = true;
2376 break;
2378 default:
2379 gcc_unreachable ();
2382 gcc_assert (any_changed);
2385 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2387 bool
2388 operation_could_trap_helper_p (enum tree_code op,
2389 bool fp_operation,
2390 bool honor_trapv,
2391 bool honor_nans,
2392 bool honor_snans,
2393 tree divisor,
2394 bool *handled)
2396 *handled = true;
2397 switch (op)
2399 case TRUNC_DIV_EXPR:
2400 case CEIL_DIV_EXPR:
2401 case FLOOR_DIV_EXPR:
2402 case ROUND_DIV_EXPR:
2403 case EXACT_DIV_EXPR:
2404 case CEIL_MOD_EXPR:
2405 case FLOOR_MOD_EXPR:
2406 case ROUND_MOD_EXPR:
2407 case TRUNC_MOD_EXPR:
2408 case RDIV_EXPR:
2409 if (honor_snans || honor_trapv)
2410 return true;
2411 if (fp_operation)
2412 return flag_trapping_math;
2413 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2414 return true;
2415 return false;
2417 case LT_EXPR:
2418 case LE_EXPR:
2419 case GT_EXPR:
2420 case GE_EXPR:
2421 case LTGT_EXPR:
2422 /* Some floating point comparisons may trap. */
2423 return honor_nans;
2425 case EQ_EXPR:
2426 case NE_EXPR:
2427 case UNORDERED_EXPR:
2428 case ORDERED_EXPR:
2429 case UNLT_EXPR:
2430 case UNLE_EXPR:
2431 case UNGT_EXPR:
2432 case UNGE_EXPR:
2433 case UNEQ_EXPR:
2434 return honor_snans;
2436 case CONVERT_EXPR:
2437 case FIX_TRUNC_EXPR:
2438 /* Conversion of floating point might trap. */
2439 return honor_nans;
2441 case NEGATE_EXPR:
2442 case ABS_EXPR:
2443 case CONJ_EXPR:
2444 /* These operations don't trap with floating point. */
2445 if (honor_trapv)
2446 return true;
2447 return false;
2449 case PLUS_EXPR:
2450 case MINUS_EXPR:
2451 case MULT_EXPR:
2452 /* Any floating arithmetic may trap. */
2453 if (fp_operation && flag_trapping_math)
2454 return true;
2455 if (honor_trapv)
2456 return true;
2457 return false;
2459 case COMPLEX_EXPR:
2460 case CONSTRUCTOR:
2461 /* Constructing an object cannot trap. */
2462 return false;
2464 default:
2465 /* Any floating arithmetic may trap. */
2466 if (fp_operation && flag_trapping_math)
2467 return true;
2469 *handled = false;
2470 return false;
2474 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2475 on floating-point values. HONOR_TRAPV is true if OP is applied on integer
2476 type operands that may trap. If OP is a division operator, DIVISOR contains
2477 the value of the divisor. */
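/* A minimal usage sketch (illustrative only; DIVISOR stands for whatever
   operand the caller has at hand):

     bool may_trap
       = operation_could_trap_p (TRUNC_DIV_EXPR, /*fp_operation=*/false,
				 /*honor_trapv=*/false, divisor);

   With default flags this is true unless DIVISOR is a nonzero constant.  */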
2479 bool
2480 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2481 tree divisor)
2483 bool honor_nans = (fp_operation && flag_trapping_math
2484 && !flag_finite_math_only);
2485 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2486 bool handled;
2488 if (TREE_CODE_CLASS (op) != tcc_comparison
2489 && TREE_CODE_CLASS (op) != tcc_unary
2490 && TREE_CODE_CLASS (op) != tcc_binary)
2491 return false;
2493 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2494 honor_nans, honor_snans, divisor,
2495 &handled);
2499 /* Returns true if it is possible to prove that the index of
2500 an array access REF (an ARRAY_REF expression) falls into the
2501 array bounds. */
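/* For instance (illustrative only), given "int a[10]" the access a[3]
   has a constant index provably within [0, 9] and this returns true,
   whereas a[i] with a non-constant I returns false.  */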
2503 static bool
2504 in_array_bounds_p (tree ref)
2506 tree idx = TREE_OPERAND (ref, 1);
2507 tree min, max;
2509 if (TREE_CODE (idx) != INTEGER_CST)
2510 return false;
2512 min = array_ref_low_bound (ref);
2513 max = array_ref_up_bound (ref);
2514 if (!min
2515 || !max
2516 || TREE_CODE (min) != INTEGER_CST
2517 || TREE_CODE (max) != INTEGER_CST)
2518 return false;
2520 if (tree_int_cst_lt (idx, min)
2521 || tree_int_cst_lt (max, idx))
2522 return false;
2524 return true;
2527 /* Returns true if it is possible to prove that the range of
2528 an array access REF (an ARRAY_RANGE_REF expression) falls
2529 into the array bounds. */
2531 static bool
2532 range_in_array_bounds_p (tree ref)
2534 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
2535 tree range_min, range_max, min, max;
2537 range_min = TYPE_MIN_VALUE (domain_type);
2538 range_max = TYPE_MAX_VALUE (domain_type);
2539 if (!range_min
2540 || !range_max
2541 || TREE_CODE (range_min) != INTEGER_CST
2542 || TREE_CODE (range_max) != INTEGER_CST)
2543 return false;
2545 min = array_ref_low_bound (ref);
2546 max = array_ref_up_bound (ref);
2547 if (!min
2548 || !max
2549 || TREE_CODE (min) != INTEGER_CST
2550 || TREE_CODE (max) != INTEGER_CST)
2551 return false;
2553 if (tree_int_cst_lt (range_min, min)
2554 || tree_int_cst_lt (max, range_max))
2555 return false;
2557 return true;
2560 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2561 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
2562 This routine expects only GIMPLE lhs or rhs input. */
2564 bool
2565 tree_could_trap_p (tree expr)
2567 enum tree_code code;
2568 bool fp_operation = false;
2569 bool honor_trapv = false;
2570 tree t, base, div = NULL_TREE;
2572 if (!expr)
2573 return false;
2575 code = TREE_CODE (expr);
2576 t = TREE_TYPE (expr);
2578 if (t)
2580 if (COMPARISON_CLASS_P (expr))
2581 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2582 else
2583 fp_operation = FLOAT_TYPE_P (t);
2584 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2587 if (TREE_CODE_CLASS (code) == tcc_binary)
2588 div = TREE_OPERAND (expr, 1);
2589 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2590 return true;
2592 restart:
2593 switch (code)
2595 case COMPONENT_REF:
2596 case REALPART_EXPR:
2597 case IMAGPART_EXPR:
2598 case BIT_FIELD_REF:
2599 case VIEW_CONVERT_EXPR:
2600 case WITH_SIZE_EXPR:
2601 expr = TREE_OPERAND (expr, 0);
2602 code = TREE_CODE (expr);
2603 goto restart;
2605 case ARRAY_RANGE_REF:
2606 base = TREE_OPERAND (expr, 0);
2607 if (tree_could_trap_p (base))
2608 return true;
2609 if (TREE_THIS_NOTRAP (expr))
2610 return false;
2611 return !range_in_array_bounds_p (expr);
2613 case ARRAY_REF:
2614 base = TREE_OPERAND (expr, 0);
2615 if (tree_could_trap_p (base))
2616 return true;
2617 if (TREE_THIS_NOTRAP (expr))
2618 return false;
2619 return !in_array_bounds_p (expr);
2621 case TARGET_MEM_REF:
2622 case MEM_REF:
2623 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
2624 && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
2625 return true;
2626 if (TREE_THIS_NOTRAP (expr))
2627 return false;
2628 /* We cannot prove that the access is in-bounds when we have
2629 variable-index TARGET_MEM_REFs. */
2630 if (code == TARGET_MEM_REF
2631 && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
2632 return true;
2633 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2635 tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2636 offset_int off = mem_ref_offset (expr);
2637 if (wi::neg_p (off, SIGNED))
2638 return true;
2639 if (TREE_CODE (base) == STRING_CST)
2640 return wi::leu_p (TREE_STRING_LENGTH (base), off);
2641 else if (DECL_SIZE_UNIT (base) == NULL_TREE
2642 || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST
2643 || wi::leu_p (wi::to_offset (DECL_SIZE_UNIT (base)), off))
2644 return true;
2645 /* Now we are sure the first byte of the access is inside
2646 the object. */
2647 return false;
2649 return true;
2651 case INDIRECT_REF:
2652 return !TREE_THIS_NOTRAP (expr);
2654 case ASM_EXPR:
2655 return TREE_THIS_VOLATILE (expr);
2657 case CALL_EXPR:
2658 t = get_callee_fndecl (expr);
2659 /* Assume that calls to weak functions may trap. */
2660 if (!t || !DECL_P (t))
2661 return true;
2662 if (DECL_WEAK (t))
2663 return tree_could_trap_p (t);
2664 return false;
2666 case FUNCTION_DECL:
2667 /* Assume that accesses to weak functions may trap, unless we know
2668 they are certainly defined in the current TU or in some other
2669 LTO partition. */
2670 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
2672 cgraph_node *node = cgraph_node::get (expr);
2673 if (node)
2674 node = node->function_symbol ();
2675 return !(node && node->in_other_partition);
2677 return false;
2679 case VAR_DECL:
2680 /* Assume that accesses to weak vars may trap, unless we know
2681 they are certainly defined in the current TU or in some other
2682 LTO partition. */
2683 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
2685 varpool_node *node = varpool_node::get (expr);
2686 if (node)
2687 node = node->ultimate_alias_target ();
2688 return !(node && node->in_other_partition);
2690 return false;
2692 default:
2693 return false;
2698 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2699 an assignment or a conditional) may throw. */
2701 static bool
2702 stmt_could_throw_1_p (gimple stmt)
2704 enum tree_code code = gimple_expr_code (stmt);
2705 bool honor_nans = false;
2706 bool honor_snans = false;
2707 bool fp_operation = false;
2708 bool honor_trapv = false;
2709 tree t;
2710 size_t i;
2711 bool handled, ret;
2713 if (TREE_CODE_CLASS (code) == tcc_comparison
2714 || TREE_CODE_CLASS (code) == tcc_unary
2715 || TREE_CODE_CLASS (code) == tcc_binary)
2717 if (is_gimple_assign (stmt)
2718 && TREE_CODE_CLASS (code) == tcc_comparison)
2719 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2720 else if (gimple_code (stmt) == GIMPLE_COND)
2721 t = TREE_TYPE (gimple_cond_lhs (stmt));
2722 else
2723 t = gimple_expr_type (stmt);
2724 fp_operation = FLOAT_TYPE_P (t);
2725 if (fp_operation)
2727 honor_nans = flag_trapping_math && !flag_finite_math_only;
2728 honor_snans = flag_signaling_nans != 0;
2730 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2731 honor_trapv = true;
2734 /* Check if the main expression may trap. */
2735 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2736 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2737 honor_nans, honor_snans, t,
2738 &handled);
2739 if (handled)
2740 return ret;
2742 /* If the expression does not trap, see if any of the individual operands may
2743 trap. */
2744 for (i = 0; i < gimple_num_ops (stmt); i++)
2745 if (tree_could_trap_p (gimple_op (stmt, i)))
2746 return true;
2748 return false;
2752 /* Return true if statement STMT could throw an exception. */
2754 bool
2755 stmt_could_throw_p (gimple stmt)
2757 if (!flag_exceptions)
2758 return false;
2760 /* The only statements that can throw an exception are assignments,
2761 conditionals, calls, resx, and asms. */
2762 switch (gimple_code (stmt))
2764 case GIMPLE_RESX:
2765 return true;
2767 case GIMPLE_CALL:
2768 return !gimple_call_nothrow_p (stmt);
2770 case GIMPLE_ASSIGN:
2771 case GIMPLE_COND:
2772 if (!cfun->can_throw_non_call_exceptions)
2773 return false;
2774 return stmt_could_throw_1_p (stmt);
2776 case GIMPLE_ASM:
2777 if (!cfun->can_throw_non_call_exceptions)
2778 return false;
2779 return gimple_asm_volatile_p (stmt);
2781 default:
2782 return false;
2787 /* Return true if expression T could throw an exception. */
2789 bool
2790 tree_could_throw_p (tree t)
2792 if (!flag_exceptions)
2793 return false;
2794 if (TREE_CODE (t) == MODIFY_EXPR)
2796 if (cfun->can_throw_non_call_exceptions
2797 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2798 return true;
2799 t = TREE_OPERAND (t, 1);
2802 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2803 t = TREE_OPERAND (t, 0);
2804 if (TREE_CODE (t) == CALL_EXPR)
2805 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2806 if (cfun->can_throw_non_call_exceptions)
2807 return tree_could_trap_p (t);
2808 return false;
2811 /* Return true if STMT can throw an exception that is not caught within
2812 the current function (CFUN). */
2814 bool
2815 stmt_can_throw_external (gimple stmt)
2817 int lp_nr;
2819 if (!stmt_could_throw_p (stmt))
2820 return false;
2822 lp_nr = lookup_stmt_eh_lp (stmt);
2823 return lp_nr == 0;
2826 /* Return true if STMT can throw an exception that is caught within
2827 the current function (CFUN). */
2829 bool
2830 stmt_can_throw_internal (gimple stmt)
2832 int lp_nr;
2834 if (!stmt_could_throw_p (stmt))
2835 return false;
2837 lp_nr = lookup_stmt_eh_lp (stmt);
2838 return lp_nr > 0;
2841 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2842 remove any entry it might have from the EH table. Return true if
2843 any change was made. */
2845 bool
2846 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2848 if (stmt_could_throw_p (stmt))
2849 return false;
2850 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2853 /* Likewise, but always use the current function. */
2855 bool
2856 maybe_clean_eh_stmt (gimple stmt)
2858 return maybe_clean_eh_stmt_fn (cfun, stmt);
2861 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2862 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2863 in the table if it should be in there. Return TRUE if a replacement was
2864 done that may require an EH edge purge. */
2866 bool
2867 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2869 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2871 if (lp_nr != 0)
2873 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2875 if (new_stmt == old_stmt && new_stmt_could_throw)
2876 return false;
2878 remove_stmt_from_eh_lp (old_stmt);
2879 if (new_stmt_could_throw)
2881 add_stmt_to_eh_lp (new_stmt, lp_nr);
2882 return false;
2884 else
2885 return true;
2888 return false;
2891 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2892 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2893 operand is the return value of duplicate_eh_regions. */
2895 bool
2896 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2897 struct function *old_fun, gimple old_stmt,
2898 hash_map<void *, void *> *map,
2899 int default_lp_nr)
2901 int old_lp_nr, new_lp_nr;
2903 if (!stmt_could_throw_p (new_stmt))
2904 return false;
2906 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2907 if (old_lp_nr == 0)
2909 if (default_lp_nr == 0)
2910 return false;
2911 new_lp_nr = default_lp_nr;
2913 else if (old_lp_nr > 0)
2915 eh_landing_pad old_lp, new_lp;
2917 old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
2918 new_lp = static_cast<eh_landing_pad> (*map->get (old_lp));
2919 new_lp_nr = new_lp->index;
2921 else
2923 eh_region old_r, new_r;
2925 old_r = (*old_fun->eh->region_array)[-old_lp_nr];
2926 new_r = static_cast<eh_region> (*map->get (old_r));
2927 new_lp_nr = -new_r->index;
2930 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2931 return true;
2934 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2935 and thus no remapping is required. */
2937 bool
2938 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2940 int lp_nr;
2942 if (!stmt_could_throw_p (new_stmt))
2943 return false;
2945 lp_nr = lookup_stmt_eh_lp (old_stmt);
2946 if (lp_nr == 0)
2947 return false;
2949 add_stmt_to_eh_lp (new_stmt, lp_nr);
2950 return true;
2953 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2954 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2955 this only handles handlers consisting of a single call, as that's the
2956 important case for C++: a destructor call for a particular object showing
2957 up in multiple handlers. */
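/* For instance (an illustrative sketch), two cleanups each consisting of
   the single call

     A::~A (&a);

   with no LHS and no static chain compare equal here, so the handlers
   can be shared.  */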
2959 static bool
2960 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2962 gimple_stmt_iterator gsi;
2963 gimple ones, twos;
2964 unsigned int ai;
2966 gsi = gsi_start (oneh);
2967 if (!gsi_one_before_end_p (gsi))
2968 return false;
2969 ones = gsi_stmt (gsi);
2971 gsi = gsi_start (twoh);
2972 if (!gsi_one_before_end_p (gsi))
2973 return false;
2974 twos = gsi_stmt (gsi);
2976 if (!is_gimple_call (ones)
2977 || !is_gimple_call (twos)
2978 || gimple_call_lhs (ones)
2979 || gimple_call_lhs (twos)
2980 || gimple_call_chain (ones)
2981 || gimple_call_chain (twos)
2982 || !gimple_call_same_target_p (ones, twos)
2983 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2984 return false;
2986 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2987 if (!operand_equal_p (gimple_call_arg (ones, ai),
2988 gimple_call_arg (twos, ai), 0))
2989 return false;
2991 return true;
2994 /* Optimize
2995 try { A() } finally { try { ~B() } catch { ~A() } }
2996 try { ... } finally { ~A() }
2997 into
2998 try { A() } catch { ~B() }
2999 try { ~B() ... } finally { ~A() }
3001 This occurs frequently in C++, where A is a local variable and B is a
3002 temporary used in the initializer for A. */
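/* An illustrative C++ sketch (made-up types): for

     A a (B (42));

   the temporary B must be destroyed once A's constructor finishes, and
   ~A() must run if anything after that point throws, which yields the
   nested try/finally shape shown above.  */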
3004 static void
3005 optimize_double_finally (gimple one, gimple two)
3007 gimple oneh;
3008 gimple_stmt_iterator gsi;
3009 gimple_seq cleanup;
3011 cleanup = gimple_try_cleanup (one);
3012 gsi = gsi_start (cleanup);
3013 if (!gsi_one_before_end_p (gsi))
3014 return;
3016 oneh = gsi_stmt (gsi);
3017 if (gimple_code (oneh) != GIMPLE_TRY
3018 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
3019 return;
3021 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
3023 gimple_seq seq = gimple_try_eval (oneh);
3025 gimple_try_set_cleanup (one, seq);
3026 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
3027 seq = copy_gimple_seq_and_replace_locals (seq);
3028 gimple_seq_add_seq (&seq, gimple_try_eval (two));
3029 gimple_try_set_eval (two, seq);
3033 /* Perform EH refactoring optimizations that are simpler to do when code
3034 flow has been lowered but EH structures haven't. */
3036 static void
3037 refactor_eh_r (gimple_seq seq)
3039 gimple_stmt_iterator gsi;
3040 gimple one, two;
3042 one = NULL;
3043 two = NULL;
3044 gsi = gsi_start (seq);
3045 while (1)
3047 one = two;
3048 if (gsi_end_p (gsi))
3049 two = NULL;
3050 else
3051 two = gsi_stmt (gsi);
3052 if (one
3053 && two
3054 && gimple_code (one) == GIMPLE_TRY
3055 && gimple_code (two) == GIMPLE_TRY
3056 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
3057 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
3058 optimize_double_finally (one, two);
3059 if (one)
3060 switch (gimple_code (one))
3062 case GIMPLE_TRY:
3063 refactor_eh_r (gimple_try_eval (one));
3064 refactor_eh_r (gimple_try_cleanup (one));
3065 break;
3066 case GIMPLE_CATCH:
3067 refactor_eh_r (gimple_catch_handler (one));
3068 break;
3069 case GIMPLE_EH_FILTER:
3070 refactor_eh_r (gimple_eh_filter_failure (one));
3071 break;
3072 case GIMPLE_EH_ELSE:
3073 refactor_eh_r (gimple_eh_else_n_body (one));
3074 refactor_eh_r (gimple_eh_else_e_body (one));
3075 break;
3076 default:
3077 break;
3079 if (two)
3080 gsi_next (&gsi);
3081 else
3082 break;
3086 namespace {
3088 const pass_data pass_data_refactor_eh =
3090 GIMPLE_PASS, /* type */
3091 "ehopt", /* name */
3092 OPTGROUP_NONE, /* optinfo_flags */
3093 TV_TREE_EH, /* tv_id */
3094 PROP_gimple_lcf, /* properties_required */
3095 0, /* properties_provided */
3096 0, /* properties_destroyed */
3097 0, /* todo_flags_start */
3098 0, /* todo_flags_finish */
3101 class pass_refactor_eh : public gimple_opt_pass
3103 public:
3104 pass_refactor_eh (gcc::context *ctxt)
3105 : gimple_opt_pass (pass_data_refactor_eh, ctxt)
3108 /* opt_pass methods: */
3109 virtual bool gate (function *) { return flag_exceptions != 0; }
3110 virtual unsigned int execute (function *)
3112 refactor_eh_r (gimple_body (current_function_decl));
3113 return 0;
3116 }; // class pass_refactor_eh
3118 } // anon namespace
3120 gimple_opt_pass *
3121 make_pass_refactor_eh (gcc::context *ctxt)
3123 return new pass_refactor_eh (ctxt);
3126 /* At the end of gimple optimization, we can lower RESX. */
3128 static bool
3129 lower_resx (basic_block bb, gimple stmt, hash_map<eh_region, tree> *mnt_map)
3131 int lp_nr;
3132 eh_region src_r, dst_r;
3133 gimple_stmt_iterator gsi;
3134 gimple x;
3135 tree fn, src_nr;
3136 bool ret = false;
3138 lp_nr = lookup_stmt_eh_lp (stmt);
3139 if (lp_nr != 0)
3140 dst_r = get_eh_region_from_lp_number (lp_nr);
3141 else
3142 dst_r = NULL;
3144 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
3145 gsi = gsi_last_bb (bb);
3147 if (src_r == NULL)
3149 /* We can wind up with no source region when pass_cleanup_eh shows
3150 that there are no entries into an eh region and deletes it, but
3151 then the block that contains the resx isn't removed. This can
3152 happen without optimization when the switch statement created by
3153 lower_try_finally_switch isn't simplified to remove the eh case.
3155 Resolve this by expanding the resx node to an abort. */
3157 fn = builtin_decl_implicit (BUILT_IN_TRAP);
3158 x = gimple_build_call (fn, 0);
3159 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3161 while (EDGE_COUNT (bb->succs) > 0)
3162 remove_edge (EDGE_SUCC (bb, 0));
3164 else if (dst_r)
3166 /* When we have a destination region, we resolve this by copying
3167 the exc_ptr and filter values into place, and changing the edge
3168 to immediately after the landing pad. */
3169 edge e;
3171 if (lp_nr < 0)
3173 basic_block new_bb;
3174 tree lab;
3176 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
3177 the failure decl into a new block, if needed. */
3178 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
3180 tree *slot = mnt_map->get (dst_r);
3181 if (slot == NULL)
3183 gimple_stmt_iterator gsi2;
3185 new_bb = create_empty_bb (bb);
3186 add_bb_to_loop (new_bb, bb->loop_father);
3187 lab = gimple_block_label (new_bb);
3188 gsi2 = gsi_start_bb (new_bb);
3190 fn = dst_r->u.must_not_throw.failure_decl;
3191 x = gimple_build_call (fn, 0);
3192 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3193 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3195 mnt_map->put (dst_r, lab);
3197 else
3199 lab = *slot;
3200 new_bb = label_to_block (lab);
3203 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3204 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
3205 e->count = bb->count;
3206 e->probability = REG_BR_PROB_BASE;
3208 else
3210 edge_iterator ei;
3211 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
3213 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
3214 src_nr = build_int_cst (integer_type_node, src_r->index);
3215 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3216 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3218 /* Update the flags for the outgoing edge. */
3219 e = single_succ_edge (bb);
3220 gcc_assert (e->flags & EDGE_EH);
3221 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3223 /* If there are no more EH users of the landing pad, delete it. */
3224 FOR_EACH_EDGE (e, ei, e->dest->preds)
3225 if (e->flags & EDGE_EH)
3226 break;
3227 if (e == NULL)
3229 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3230 remove_eh_landing_pad (lp);
3234 ret = true;
3236 else
3238 tree var;
3240 /* When we don't have a destination region, this exception escapes
3241 up the call chain. We resolve this by generating a call to the
3242 _Unwind_Resume library function. */
3244 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3245 with no arguments for C++ and Java. Check for that. */
3246 if (src_r->use_cxa_end_cleanup)
3248 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3249 x = gimple_build_call (fn, 0);
3250 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3252 else
3254 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3255 src_nr = build_int_cst (integer_type_node, src_r->index);
3256 x = gimple_build_call (fn, 1, src_nr);
3257 var = create_tmp_var (ptr_type_node, NULL);
3258 var = make_ssa_name (var, x);
3259 gimple_call_set_lhs (x, var);
3260 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3262 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3263 x = gimple_build_call (fn, 1, var);
3264 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3267 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3270 gsi_remove (&gsi, true);
3272 return ret;
3275 namespace {
3277 const pass_data pass_data_lower_resx =
3279 GIMPLE_PASS, /* type */
3280 "resx", /* name */
3281 OPTGROUP_NONE, /* optinfo_flags */
3282 TV_TREE_EH, /* tv_id */
3283 PROP_gimple_lcf, /* properties_required */
3284 0, /* properties_provided */
3285 0, /* properties_destroyed */
3286 0, /* todo_flags_start */
3287 0, /* todo_flags_finish */
3290 class pass_lower_resx : public gimple_opt_pass
3292 public:
3293 pass_lower_resx (gcc::context *ctxt)
3294 : gimple_opt_pass (pass_data_lower_resx, ctxt)
3297 /* opt_pass methods: */
3298 virtual bool gate (function *) { return flag_exceptions != 0; }
3299 virtual unsigned int execute (function *);
3301 }; // class pass_lower_resx
3303 unsigned
3304 pass_lower_resx::execute (function *fun)
3306 basic_block bb;
3307 bool dominance_invalidated = false;
3308 bool any_rewritten = false;
3310 hash_map<eh_region, tree> mnt_map;
3312 FOR_EACH_BB_FN (bb, fun)
3314 gimple last = last_stmt (bb);
3315 if (last && is_gimple_resx (last))
3317 dominance_invalidated |= lower_resx (bb, last, &mnt_map);
3318 any_rewritten = true;
3322 if (dominance_invalidated)
3324 free_dominance_info (CDI_DOMINATORS);
3325 free_dominance_info (CDI_POST_DOMINATORS);
3328 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3331 } // anon namespace
3333 gimple_opt_pass *
3334 make_pass_lower_resx (gcc::context *ctxt)
3336 return new pass_lower_resx (ctxt);
3339 /* Try to optimize var = {v} {CLOBBER} stmts followed just by
3340 external throw. */
3342 static void
3343 optimize_clobbers (basic_block bb)
3345 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3346 bool any_clobbers = false;
3347 bool seen_stack_restore = false;
3348 edge_iterator ei;
3349 edge e;
3351 /* Only optimize anything if the bb contains at least one clobber,
3352 ends with resx (checked by caller), optionally contains some
3353 debug stmts or labels, or at most one __builtin_stack_restore
3354 call, and has an incoming EH edge. */
3355 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3357 gimple stmt = gsi_stmt (gsi);
3358 if (is_gimple_debug (stmt))
3359 continue;
3360 if (gimple_clobber_p (stmt))
3362 any_clobbers = true;
3363 continue;
3365 if (!seen_stack_restore
3366 && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
3368 seen_stack_restore = true;
3369 continue;
3371 if (gimple_code (stmt) == GIMPLE_LABEL)
3372 break;
3373 return;
3375 if (!any_clobbers)
3376 return;
3377 FOR_EACH_EDGE (e, ei, bb->preds)
3378 if (e->flags & EDGE_EH)
3379 break;
3380 if (e == NULL)
3381 return;
3382 gsi = gsi_last_bb (bb);
3383 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3385 gimple stmt = gsi_stmt (gsi);
3386 if (!gimple_clobber_p (stmt))
3387 continue;
3388 unlink_stmt_vdef (stmt);
3389 gsi_remove (&gsi, true);
3390 release_defs (stmt);
3394 /* Try to sink var = {v} {CLOBBER} stmts followed just by
3395 internal throw to successor BB. */
3397 static int
3398 sink_clobbers (basic_block bb)
3400 edge e;
3401 edge_iterator ei;
3402 gimple_stmt_iterator gsi, dgsi;
3403 basic_block succbb;
3404 bool any_clobbers = false;
3405 unsigned todo = 0;
3407 /* Only optimize if BB has a single EH successor and
3408 all predecessor edges are EH too. */
3409 if (!single_succ_p (bb)
3410 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3411 return 0;
3413 FOR_EACH_EDGE (e, ei, bb->preds)
3415 if ((e->flags & EDGE_EH) == 0)
3416 return 0;
3419 /* And BB contains only CLOBBER stmts before the final
3420 RESX. */
3421 gsi = gsi_last_bb (bb);
3422 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3424 gimple stmt = gsi_stmt (gsi);
3425 if (is_gimple_debug (stmt))
3426 continue;
3427 if (gimple_code (stmt) == GIMPLE_LABEL)
3428 break;
3429 if (!gimple_clobber_p (stmt))
3430 return 0;
3431 any_clobbers = true;
3433 if (!any_clobbers)
3434 return 0;
3436 edge succe = single_succ_edge (bb);
3437 succbb = succe->dest;
3439 /* See if there is a virtual PHI node to take an updated virtual
3440 operand from. */
3441 gimple vphi = NULL;
3442 tree vuse = NULL_TREE;
3443 for (gsi = gsi_start_phis (succbb); !gsi_end_p (gsi); gsi_next (&gsi))
3445 tree res = gimple_phi_result (gsi_stmt (gsi));
3446 if (virtual_operand_p (res))
3448 vphi = gsi_stmt (gsi);
3449 vuse = res;
3450 break;
3454 dgsi = gsi_after_labels (succbb);
3455 gsi = gsi_last_bb (bb);
3456 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3458 gimple stmt = gsi_stmt (gsi);
3459 tree lhs;
3460 if (is_gimple_debug (stmt))
3461 continue;
3462 if (gimple_code (stmt) == GIMPLE_LABEL)
3463 break;
3464 lhs = gimple_assign_lhs (stmt);
3465 /* Unfortunately we don't have dominance info updated at this
3466 point, so checking if
3467 dominated_by_p (CDI_DOMINATORS, succbb,
3468 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0)))
3469 would be too costly. Thus, avoid sinking any clobbers that
3470 refer to non-(D) SSA_NAMEs. */
3471 if (TREE_CODE (lhs) == MEM_REF
3472 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
3473 && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0)))
3475 unlink_stmt_vdef (stmt);
3476 gsi_remove (&gsi, true);
3477 release_defs (stmt);
3478 continue;
3481 /* As we do not change stmt order when sinking across a
3482 forwarder edge we can keep virtual operands in place. */
3483 gsi_remove (&gsi, false);
3484 gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);
3486 /* But adjust virtual operands if we sunk across a PHI node. */
3487 if (vuse)
3489 gimple use_stmt;
3490 imm_use_iterator iter;
3491 use_operand_p use_p;
3492 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
3493 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3494 SET_USE (use_p, gimple_vdef (stmt));
3495 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
3497 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
3498 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
3500 /* Adjust the incoming virtual operand. */
3501 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
3502 SET_USE (gimple_vuse_op (stmt), vuse);
3504 /* If there is more than one predecessor but no virtual PHI node,
3505 arrange for virtual operands to be renamed. */
3506 else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
3507 && !single_pred_p (succbb))
3509 /* In this case there will be no use of the VDEF of this stmt.
3510 ??? Unless this is a secondary opportunity and we have not
3511 removed unreachable blocks yet, so we cannot assert this.
3512 Which also means we will end up renaming too many times. */
3513 SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
3514 mark_virtual_operands_for_renaming (cfun);
3515 todo |= TODO_update_ssa_only_virtuals;
3519 return todo;
3522 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3523 we have found some duplicate labels and removed some edges. */
3525 static bool
3526 lower_eh_dispatch (basic_block src, gimple stmt)
3528 gimple_stmt_iterator gsi;
3529 int region_nr;
3530 eh_region r;
3531 tree filter, fn;
3532 gimple x;
3533 bool redirected = false;
3535 region_nr = gimple_eh_dispatch_region (stmt);
3536 r = get_eh_region_from_number (region_nr);
3538 gsi = gsi_last_bb (src);
3540 switch (r->type)
3542 case ERT_TRY:
3544 auto_vec<tree> labels;
3545 tree default_label = NULL;
3546 eh_catch c;
3547 edge_iterator ei;
3548 edge e;
3549 hash_set<tree> seen_values;
3551 /* Collect the labels for a switch. Zero the post_landing_pad
3552 field because we'll no longer have anything keeping these labels
3553 in existence and the optimizer will be free to merge these
3554 blocks at will. */
3555 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3557 tree tp_node, flt_node, lab = c->label;
3558 bool have_label = false;
3560 c->label = NULL;
3561 tp_node = c->type_list;
3562 flt_node = c->filter_list;
3564 if (tp_node == NULL)
3566 default_label = lab;
3567 break;
3571 /* Filter out duplicate labels that arise when this handler
3572 is shadowed by an earlier one. When no labels are
3573 attached to the handler anymore, we remove
3574 the corresponding edge and then we delete unreachable
3575 blocks at the end of this pass. */
3576 if (! seen_values.contains (TREE_VALUE (flt_node)))
3578 tree t = build_case_label (TREE_VALUE (flt_node),
3579 NULL, lab);
3580 labels.safe_push (t);
3581 seen_values.add (TREE_VALUE (flt_node));
3582 have_label = true;
3585 tp_node = TREE_CHAIN (tp_node);
3586 flt_node = TREE_CHAIN (flt_node);
3588 while (tp_node);
3589 if (! have_label)
3591 remove_edge (find_edge (src, label_to_block (lab)));
3592 redirected = true;
3596 /* Clean up the edge flags. */
3597 FOR_EACH_EDGE (e, ei, src->succs)
3599 if (e->flags & EDGE_FALLTHRU)
3601 /* If there was no catch-all, use the fallthru edge. */
3602 if (default_label == NULL)
3603 default_label = gimple_block_label (e->dest);
3604 e->flags &= ~EDGE_FALLTHRU;
3607 gcc_assert (default_label != NULL);
3609 /* Don't generate a switch if there's only a default case.
3610 This is common in the form of try { A; } catch (...) { B; }. */
3611 if (!labels.exists ())
3613 e = single_succ_edge (src);
3614 e->flags |= EDGE_FALLTHRU;
3616 else
3618 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3619 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3620 region_nr));
3621 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3622 filter = make_ssa_name (filter, x);
3623 gimple_call_set_lhs (x, filter);
3624 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3626 /* Turn the default label into a default case. */
3627 default_label = build_case_label (NULL, NULL, default_label);
3628 sort_case_labels (labels);
3630 x = gimple_build_switch (filter, default_label, labels);
3631 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3634 break;
3636 case ERT_ALLOWED_EXCEPTIONS:
3638 edge b_e = BRANCH_EDGE (src);
3639 edge f_e = FALLTHRU_EDGE (src);
3641 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3642 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3643 region_nr));
3644 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3645 filter = make_ssa_name (filter, x);
3646 gimple_call_set_lhs (x, filter);
3647 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3649 r->u.allowed.label = NULL;
3650 x = gimple_build_cond (EQ_EXPR, filter,
3651 build_int_cst (TREE_TYPE (filter),
3652 r->u.allowed.filter),
3653 NULL_TREE, NULL_TREE);
3654 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3656 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3657 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3659 break;
3661 default:
3662 gcc_unreachable ();
3665 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3666 gsi_remove (&gsi, true);
3667 return redirected;
3670 namespace {
3672 const pass_data pass_data_lower_eh_dispatch =
3674 GIMPLE_PASS, /* type */
3675 "ehdisp", /* name */
3676 OPTGROUP_NONE, /* optinfo_flags */
3677 TV_TREE_EH, /* tv_id */
3678 PROP_gimple_lcf, /* properties_required */
3679 0, /* properties_provided */
3680 0, /* properties_destroyed */
3681 0, /* todo_flags_start */
3682 0, /* todo_flags_finish */
3685 class pass_lower_eh_dispatch : public gimple_opt_pass
3687 public:
3688 pass_lower_eh_dispatch (gcc::context *ctxt)
3689 : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt)
3692 /* opt_pass methods: */
3693 virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; }
3694 virtual unsigned int execute (function *);
3696 }; // class pass_lower_eh_dispatch
3698 unsigned
3699 pass_lower_eh_dispatch::execute (function *fun)
3701 basic_block bb;
3702 int flags = 0;
3703 bool redirected = false;
3705 assign_filter_values ();
3707 FOR_EACH_BB_FN (bb, fun)
3709 gimple last = last_stmt (bb);
3710 if (last == NULL)
3711 continue;
3712 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
3714 redirected |= lower_eh_dispatch (bb, last);
3715 flags |= TODO_update_ssa_only_virtuals;
3717 else if (gimple_code (last) == GIMPLE_RESX)
3719 if (stmt_can_throw_external (last))
3720 optimize_clobbers (bb);
3721 else
3722 flags |= sink_clobbers (bb);
3726 if (redirected)
3727 delete_unreachable_blocks ();
3728 return flags;
3731 } // anon namespace
3733 gimple_opt_pass *
3734 make_pass_lower_eh_dispatch (gcc::context *ctxt)
3736 return new pass_lower_eh_dispatch (ctxt);
3739 /* Walk statements, see what regions and, optionally, landing pads
3740 are really referenced.
3742 Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
3743 and in LP_REACHABLE an sbitmap with bits set for reachable landing pads.
3745 Passing NULL for LP_REACHABLE is valid, in this case only reachable
3746 regions are marked.
3748 The caller is responsible for freeing the returned sbitmaps. */
3750 static void
3751 mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
3753 sbitmap r_reachable, lp_reachable;
3754 basic_block bb;
3755 bool mark_landing_pads = (lp_reachablep != NULL);
3756 gcc_checking_assert (r_reachablep != NULL);
3758 r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
3759 bitmap_clear (r_reachable);
3760 *r_reachablep = r_reachable;
3762 if (mark_landing_pads)
3764 lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
3765 bitmap_clear (lp_reachable);
3766 *lp_reachablep = lp_reachable;
3768 else
3769 lp_reachable = NULL;
3771 FOR_EACH_BB_FN (bb, cfun)
3773 gimple_stmt_iterator gsi;
3775 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3777 gimple stmt = gsi_stmt (gsi);
3779 if (mark_landing_pads)
3781 int lp_nr = lookup_stmt_eh_lp (stmt);
3783 /* Negative LP numbers are MUST_NOT_THROW regions which
3784 are not considered BB enders. */
3785 if (lp_nr < 0)
3786 bitmap_set_bit (r_reachable, -lp_nr);
3788 /* Positive LP numbers are real landing pads, and BB enders. */
3789 else if (lp_nr > 0)
3791 gcc_assert (gsi_one_before_end_p (gsi));
3792 eh_region region = get_eh_region_from_lp_number (lp_nr);
3793 bitmap_set_bit (r_reachable, region->index);
3794 bitmap_set_bit (lp_reachable, lp_nr);
3798 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3799 switch (gimple_code (stmt))
3801 case GIMPLE_RESX:
3802 bitmap_set_bit (r_reachable, gimple_resx_region (stmt));
3803 break;
3804 case GIMPLE_EH_DISPATCH:
3805 bitmap_set_bit (r_reachable, gimple_eh_dispatch_region (stmt));
3806 break;
3807 default:
3808 break;
3814 /* Remove unreachable handlers and unreachable landing pads. */
3816 static void
3817 remove_unreachable_handlers (void)
3819 sbitmap r_reachable, lp_reachable;
3820 eh_region region;
3821 eh_landing_pad lp;
3822 unsigned i;
3824 mark_reachable_handlers (&r_reachable, &lp_reachable);
3826 if (dump_file)
3828 fprintf (dump_file, "Before removal of unreachable regions:\n");
3829 dump_eh_tree (dump_file, cfun);
3830 fprintf (dump_file, "Reachable regions: ");
3831 dump_bitmap_file (dump_file, r_reachable);
3832 fprintf (dump_file, "Reachable landing pads: ");
3833 dump_bitmap_file (dump_file, lp_reachable);
3836 if (dump_file)
3838 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3839 if (region && !bitmap_bit_p (r_reachable, region->index))
3840 fprintf (dump_file,
3841 "Removing unreachable region %d\n",
3842 region->index);
3845 remove_unreachable_eh_regions (r_reachable);
3847 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3848 if (lp && !bitmap_bit_p (lp_reachable, lp->index))
3850 if (dump_file)
3851 fprintf (dump_file,
3852 "Removing unreachable landing pad %d\n",
3853 lp->index);
3854 remove_eh_landing_pad (lp);
3857 if (dump_file)
3859 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3860 dump_eh_tree (dump_file, cfun);
3861 fprintf (dump_file, "\n\n");
3864 sbitmap_free (r_reachable);
3865 sbitmap_free (lp_reachable);
3867 #ifdef ENABLE_CHECKING
3868 verify_eh_tree (cfun);
3869 #endif
3872 /* Remove unreachable handlers if any landing pads have been removed after
3873 last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
3875 void
3876 maybe_remove_unreachable_handlers (void)
3878 eh_landing_pad lp;
3879 unsigned i;
3881 if (cfun->eh == NULL)
3882 return;
3884 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3885 if (lp && lp->post_landing_pad)
3887 if (label_to_block (lp->post_landing_pad) == NULL)
3889 remove_unreachable_handlers ();
3890 return;
3895 /* Remove regions that do not have landing pads. This assumes
3896 that remove_unreachable_handlers has already been run, and
3897 that we've just manipulated the landing pads since then.
3899 Preserve regions with landing pads and regions that prevent
3900 exceptions from propagating further, even if these regions
3901 are not reachable. */
3903 static void
3904 remove_unreachable_handlers_no_lp (void)
3906 eh_region region;
3907 sbitmap r_reachable;
3908 unsigned i;
3910 mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);
3912 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3914 if (! region)
3915 continue;
3917 if (region->landing_pads != NULL
3918 || region->type == ERT_MUST_NOT_THROW)
3919 bitmap_set_bit (r_reachable, region->index);
3921 if (dump_file
3922 && !bitmap_bit_p (r_reachable, region->index))
3923 fprintf (dump_file,
3924 "Removing unreachable region %d\n",
3925 region->index);
3928 remove_unreachable_eh_regions (r_reachable);
3930 sbitmap_free (r_reachable);
3933 /* Undo critical edge splitting on an EH landing pad. Earlier, we
3934 optimistically split all sorts of edges, including EH edges. The
3935 optimization passes in between may not have needed them; if not,
3936 we should undo the split.
3938 Recognize this case by having one EH edge incoming to the BB and
3939 one normal edge outgoing; BB should be empty apart from the
3940 post_landing_pad label.
3942 Note that this is slightly different from the empty handler case
3943 handled by cleanup_empty_eh, in that the actual handler may yet
3944 have actual code but the landing pad has been separated from the
3945 handler. As such, cleanup_empty_eh relies on this transformation
3946 having been done first. */
3948 static bool
3949 unsplit_eh (eh_landing_pad lp)
3951 basic_block bb = label_to_block (lp->post_landing_pad);
3952 gimple_stmt_iterator gsi;
3953 edge e_in, e_out;
3955 /* Quickly check the edge counts on BB for singularity. */
3956 if (!single_pred_p (bb) || !single_succ_p (bb))
3957 return false;
3958 e_in = single_pred_edge (bb);
3959 e_out = single_succ_edge (bb);
3961 /* Input edge must be EH and output edge must be normal. */
3962 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3963 return false;
3965 /* The block must be empty except for the labels and debug insns. */
3966 gsi = gsi_after_labels (bb);
3967 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3968 gsi_next_nondebug (&gsi);
3969 if (!gsi_end_p (gsi))
3970 return false;
3972 /* The destination block must not already have a landing pad
3973 for a different region. */
3974 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3976 gimple stmt = gsi_stmt (gsi);
3977 tree lab;
3978 int lp_nr;
3980 if (gimple_code (stmt) != GIMPLE_LABEL)
3981 break;
3982 lab = gimple_label_label (stmt);
3983 lp_nr = EH_LANDING_PAD_NR (lab);
3984 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3985 return false;
3988 /* The new destination block must not already be a destination of
3989 the source block, lest we merge fallthru and eh edges and get
3990 all sorts of confused. */
3991 if (find_edge (e_in->src, e_out->dest))
3992 return false;
3994 /* ??? We can get degenerate phis due to cfg cleanups. I would have
3995 thought this should have been cleaned up by a phicprop pass, but
3996 that doesn't appear to handle virtuals. Propagate by hand. */
3997 if (!gimple_seq_empty_p (phi_nodes (bb)))
3999 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
4001 gimple use_stmt, phi = gsi_stmt (gsi);
4002 tree lhs = gimple_phi_result (phi);
4003 tree rhs = gimple_phi_arg_def (phi, 0);
4004 use_operand_p use_p;
4005 imm_use_iterator iter;
4007 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
4009 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
4010 SET_USE (use_p, rhs);
4013 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4014 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
4016 remove_phi_node (&gsi, true);
4020 if (dump_file && (dump_flags & TDF_DETAILS))
4021 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
4022 lp->index, e_out->dest->index);
4024 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
4025 a successor edge, humor it. But do the real CFG change with the
4026 predecessor of E_OUT in order to preserve the ordering of arguments
4027 to the PHI nodes in E_OUT->DEST. */
4028 redirect_eh_edge_1 (e_in, e_out->dest, false);
4029 redirect_edge_pred (e_out, e_in->src);
4030 e_out->flags = e_in->flags;
4031 e_out->probability = e_in->probability;
4032 e_out->count = e_in->count;
4033 remove_edge (e_in);
4035 return true;
4038 /* Examine each landing pad block and see if it matches unsplit_eh. */
4040 static bool
4041 unsplit_all_eh (void)
4043 bool changed = false;
4044 eh_landing_pad lp;
4045 int i;
4047 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
4048 if (lp)
4049 changed |= unsplit_eh (lp);
4051 return changed;
4054 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
4055 to OLD_BB to NEW_BB; return true on success, false on failure.
4057 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
4058 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
4059 Virtual PHIs may be deleted and marked for renaming. */
4061 static bool
4062 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
4063 edge old_bb_out, bool change_region)
4065 gimple_stmt_iterator ngsi, ogsi;
4066 edge_iterator ei;
4067 edge e;
4068 bitmap ophi_handled;
4070 /* The destination block must not be a regular successor for any
4071 of the preds of the landing pad. Thus, avoid turning
4072 <..>
4073 | \ EH
4074 | <..>
4075 | /
4076 <..>
4077 into
4078 <..>
4079 | | EH
4080 <..>
4081 which CFG verification would choke on. See PR45172 and PR51089. */
4082 FOR_EACH_EDGE (e, ei, old_bb->preds)
4083 if (find_edge (e->src, new_bb))
4084 return false;
4086 FOR_EACH_EDGE (e, ei, old_bb->preds)
4087 redirect_edge_var_map_clear (e);
4089 ophi_handled = BITMAP_ALLOC (NULL);
4091 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
4092 for the edges we're going to move. */
4093 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
4095 gimple ophi, nphi = gsi_stmt (ngsi);
4096 tree nresult, nop;
4098 nresult = gimple_phi_result (nphi);
4099 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
4101 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
4102 the source ssa_name. */
4103 ophi = NULL;
4104 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4106 ophi = gsi_stmt (ogsi);
4107 if (gimple_phi_result (ophi) == nop)
4108 break;
4109 ophi = NULL;
4112 /* If we did find the corresponding PHI, copy those inputs. */
4113 if (ophi)
4115 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
4116 if (!has_single_use (nop))
4118 imm_use_iterator imm_iter;
4119 use_operand_p use_p;
4121 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
4123 if (!gimple_debug_bind_p (USE_STMT (use_p))
4124 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
4125 || gimple_bb (USE_STMT (use_p)) != new_bb))
4126 goto fail;
4129 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
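/* Record, for each incoming EH edge, the argument OPHI carries on that
   edge as the value that NRESULT's PHI should receive once the edge is
   redirected into NEW_BB.  */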
4130 FOR_EACH_EDGE (e, ei, old_bb->preds)
4132 location_t oloc;
4133 tree oop;
4135 if ((e->flags & EDGE_EH) == 0)
4136 continue;
4137 oop = gimple_phi_arg_def (ophi, e->dest_idx);
4138 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
4139 redirect_edge_var_map_add (e, nresult, oop, oloc);
4142 /* If we didn't find the PHI, then whether it's a real variable or a VOP,
4143 we know from the fact that OLD_BB is tree_empty_eh_handler_p that the
4144 variable is unchanged from input to the block, and we can simply
4145 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
4146 else
4148 location_t nloc
4149 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
4150 FOR_EACH_EDGE (e, ei, old_bb->preds)
4151 redirect_edge_var_map_add (e, nresult, nop, nloc);
4155 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
4156 we don't know what values from the other edges into NEW_BB to use. */
4157 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4159 gimple ophi = gsi_stmt (ogsi);
4160 tree oresult = gimple_phi_result (ophi);
4161 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
4162 goto fail;
4165 /* Finally, move the edges and update the PHIs. */
4166 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
4167 if (e->flags & EDGE_EH)
4169 /* ??? CFG manipulation routines do not try to update loop
4170 form on edge redirection. Do so manually here for now. */
4171 /* If we redirect a loop entry or latch edge, we will either create
4172 a multiple-entry loop or rotate the loop. If the loops merge,
4173 we may have created a loop with multiple latches.
4174 All of this isn't easily fixed, thus cancel the affected loop
4175 and mark the other loop as possibly having multiple latches. */
4176 if (e->dest == e->dest->loop_father->header)
4178 mark_loop_for_removal (e->dest->loop_father);
4179 new_bb->loop_father->latch = NULL;
4180 loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
4182 redirect_eh_edge_1 (e, new_bb, change_region);
4183 redirect_edge_succ (e, new_bb);
4184 flush_pending_stmts (e);
4186 else
4187 ei_next (&ei);
4189 BITMAP_FREE (ophi_handled);
4190 return true;
4192 fail:
4193 FOR_EACH_EDGE (e, ei, old_bb->preds)
4194 redirect_edge_var_map_clear (e);
4195 BITMAP_FREE (ophi_handled);
4196 return false;
4199 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
4200 old region to NEW_REGION at BB. */
4202 static void
4203 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
4204 eh_landing_pad lp, eh_region new_region)
4206 gimple_stmt_iterator gsi;
4207 eh_landing_pad *pp;
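/* Unlink LP from its current region's singly-linked list of landing
   pads by walking the chain with a pointer to the link field.  */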
4209 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
4210 continue;
4211 *pp = lp->next_lp;
4213 lp->region = new_region;
4214 lp->next_lp = new_region->landing_pads;
4215 new_region->landing_pads = lp;
4217 /* Delete the RESX that was matched within the empty handler block. */
4218 gsi = gsi_last_bb (bb);
4219 unlink_stmt_vdef (gsi_stmt (gsi));
4220 gsi_remove (&gsi, true);
4222 /* Clean up E_OUT for the fallthru. */
4223 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
4224 e_out->probability = REG_BR_PROB_BASE;
4227 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
4228 unsplitting than unsplit_eh was prepared to handle, e.g. when
4229 multiple incoming edges and phis are involved. */
4231 static bool
4232 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
4234 gimple_stmt_iterator gsi;
4235 tree lab;
4237 /* We really ought not to have totally lost everything following
4238 a landing pad label. Given that BB is empty, there had better
4239 be a successor. */
4240 gcc_assert (e_out != NULL);
4242 /* The destination block must not already have a landing pad
4243 for a different region. */
4244 lab = NULL;
4245 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4247 gimple stmt = gsi_stmt (gsi);
4248 int lp_nr;
4250 if (gimple_code (stmt) != GIMPLE_LABEL)
4251 break;
4252 lab = gimple_label_label (stmt);
4253 lp_nr = EH_LANDING_PAD_NR (lab);
4254 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4255 return false;
4258 /* Attempt to move the PHIs into the successor block. */
4259 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
4261 if (dump_file && (dump_flags & TDF_DETAILS))
4262 fprintf (dump_file,
4263 "Unsplit EH landing pad %d to block %i "
4264 "(via cleanup_empty_eh).\n",
4265 lp->index, e_out->dest->index);
4266 return true;
4269 return false;
4272 /* Return true if edge E_FIRST is part of an empty infinite loop
4273 or leads to such a loop through a series of single successor
4274 empty bbs. */
4276 static bool
4277 infinite_empty_loop_p (edge e_first)
4279 bool inf_loop = false;
4280 edge e;
4282 if (e_first->dest == e_first->src)
4283 return true;
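/* Walk the chain of empty single-successor blocks, marking each visited
   block through its AUX field.  Reaching an already marked block means
   the chain closes into a cycle, i.e. an empty infinite loop.  */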
4285 e_first->src->aux = (void *) 1;
4286 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4288 gimple_stmt_iterator gsi;
4289 if (e->dest->aux)
4291 inf_loop = true;
4292 break;
4294 e->dest->aux = (void *) 1;
4295 gsi = gsi_after_labels (e->dest);
4296 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4297 gsi_next_nondebug (&gsi);
4298 if (!gsi_end_p (gsi))
4299 break;
4301 e_first->src->aux = NULL;
4302 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4303 e->dest->aux = NULL;
4305 return inf_loop;
4308 /* Examine the block associated with LP to determine if it's an empty
4309 handler for its EH region. If so, attempt to redirect EH edges to
4310 an outer region. Return true if the CFG was updated in any way. This
4311 is similar to jump forwarding, just across EH edges. */
4313 static bool
4314 cleanup_empty_eh (eh_landing_pad lp)
4316 basic_block bb = label_to_block (lp->post_landing_pad);
4317 gimple_stmt_iterator gsi;
4318 gimple resx;
4319 eh_region new_region;
4320 edge_iterator ei;
4321 edge e, e_out;
4322 bool has_non_eh_pred;
4323 bool ret = false;
4324 int new_lp_nr;
4326 /* There can be zero or one edge out of BB. This is the quickest test. */
4327 switch (EDGE_COUNT (bb->succs))
4329 case 0:
4330 e_out = NULL;
4331 break;
4332 case 1:
4333 e_out = single_succ_edge (bb);
4334 break;
4335 default:
4336 return false;
4339 resx = last_stmt (bb);
4340 if (resx && is_gimple_resx (resx))
4342 if (stmt_can_throw_external (resx))
4343 optimize_clobbers (bb);
4344 else if (sink_clobbers (bb))
4345 ret = true;
4348 gsi = gsi_after_labels (bb);
4350 /* Make sure to skip debug statements. */
4351 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4352 gsi_next_nondebug (&gsi);
4354 /* If the block is totally empty, look for more unsplitting cases. */
4355 if (gsi_end_p (gsi))
4357 /* For the degenerate case of an infinite loop, bail out.
4358 If BB has no successors and is totally empty, which can happen e.g.
4359 because of an incorrect noreturn attribute, bail out too. */
4360 if (e_out == NULL
4361 || infinite_empty_loop_p (e_out))
4362 return ret;
4364 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
4367 /* The block should consist only of a single RESX statement, modulo a
4368 preceding call to __builtin_stack_restore if there is no outgoing
4369 edge, since the call can be eliminated in this case. */
4370 resx = gsi_stmt (gsi);
4371 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4373 gsi_next (&gsi);
4374 resx = gsi_stmt (gsi);
4376 if (!is_gimple_resx (resx))
4377 return ret;
4378 gcc_assert (gsi_one_before_end_p (gsi));
4380 /* Determine if there are non-EH edges, or resx edges into the handler. */
4381 has_non_eh_pred = false;
4382 FOR_EACH_EDGE (e, ei, bb->preds)
4383 if (!(e->flags & EDGE_EH))
4384 has_non_eh_pred = true;
4386 /* Find the handler that encloses the empty handler by looking at
4387 where the RESX instruction was vectored. */
4388 new_lp_nr = lookup_stmt_eh_lp (resx);
4389 new_region = get_eh_region_from_lp_number (new_lp_nr);
4391 /* If there's no destination region within the current function,
4392 redirection is trivial via removing the throwing statements from
4393 the EH region, removing the EH edges, and allowing the block
4394 to go unreachable. */
4395 if (new_region == NULL)
4397 gcc_assert (e_out == NULL);
4398 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4399 if (e->flags & EDGE_EH)
4401 gimple stmt = last_stmt (e->src);
4402 remove_stmt_from_eh_lp (stmt);
4403 remove_edge (e);
4405 else
4406 ei_next (&ei);
4407 goto succeed;
4410 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4411 to handle the abort and allow the blocks to go unreachable. */
4412 if (new_region->type == ERT_MUST_NOT_THROW)
4414 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4415 if (e->flags & EDGE_EH)
4417 gimple stmt = last_stmt (e->src);
4418 remove_stmt_from_eh_lp (stmt);
4419 add_stmt_to_eh_lp (stmt, new_lp_nr);
4420 remove_edge (e);
4422 else
4423 ei_next (&ei);
4424 goto succeed;
4427 /* Try to redirect the EH edges and merge the PHIs into the destination
4428 landing pad block. If the merge succeeds, we'll already have redirected
4429 all the EH edges. The handler itself will go unreachable if there were
4430 no normal edges. */
4431 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
4432 goto succeed;
4434 /* Finally, if all input edges are EH edges, then we can (potentially)
4435 reduce the number of transfers from the runtime by moving the landing
4436 pad from the original region to the new region. This is a win when
4437 we remove the last CLEANUP region along a particular exception
4438 propagation path. Since nothing changes except for the region with
4439 which the landing pad is associated, the PHI nodes do not need to be
4440 adjusted at all. */
4441 if (!has_non_eh_pred)
4443 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4444 if (dump_file && (dump_flags & TDF_DETAILS))
4445 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4446 lp->index, new_region->index);
4448 /* ??? The CFG didn't change, but we may have rendered the
4449 old EH region unreachable. Trigger a cleanup there. */
4450 return true;
4453 return ret;
4455 succeed:
4456 if (dump_file && (dump_flags & TDF_DETAILS))
4457 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4458 remove_eh_landing_pad (lp);
4459 return true;
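/* Illustrative sketch, not from the original sources; the names <LP_BB>,
   R_inner and R_outer are for exposition only.  A typical candidate is a
   post-landing-pad block whose only real statement re-raises:

       <LP_BB>:            landing pad for region R_inner
         resx              vectored to the enclosing region R_outer

   Redirecting the incoming EH edges to R_outer's landing pad (or, when
   every predecessor edge is an EH edge, simply moving the landing pad
   into R_outer) lets <LP_BB> and the empty region R_inner be removed.  */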
4462 /* Do a post-order traversal of the EH region tree. Examine each
4463 post_landing_pad block and see if we can eliminate it as empty. */
4465 static bool
4466 cleanup_all_empty_eh (void)
4468 bool changed = false;
4469 eh_landing_pad lp;
4470 int i;
4472 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
4473 if (lp)
4474 changed |= cleanup_empty_eh (lp);
4476 return changed;
4479 /* Perform cleanups and lowering of exception handling:
4480 1) Cleanup regions with handlers doing nothing are optimized out.
4481 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out.
4482 3) Info about regions containing instructions, and regions
4483 reachable via local EH edges, is collected.
4484 4) The EH tree is pruned for regions no longer necessary.
4486 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4487 Unify those that have the same failure decl and locus.
4490 static unsigned int
4491 execute_cleanup_eh_1 (void)
4493 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4494 looking up unreachable landing pads. */
4495 remove_unreachable_handlers ();
4497 /* Watch out for the region tree vanishing because everything was unreachable. */
4498 if (cfun->eh->region_tree)
4500 bool changed = false;
4502 if (optimize)
4503 changed |= unsplit_all_eh ();
4504 changed |= cleanup_all_empty_eh ();
4506 if (changed)
4508 free_dominance_info (CDI_DOMINATORS);
4509 free_dominance_info (CDI_POST_DOMINATORS);
4511 /* We delayed all basic block deletion, as we may have performed
4512 cleanups on EH edges while non-EH edges were still present. */
4513 delete_unreachable_blocks ();
4515 /* We manipulated the landing pads. Remove any region that no
4516 longer has a landing pad. */
4517 remove_unreachable_handlers_no_lp ();
4519 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4523 return 0;
4526 namespace {
4528 const pass_data pass_data_cleanup_eh =
4530 GIMPLE_PASS, /* type */
4531 "ehcleanup", /* name */
4532 OPTGROUP_NONE, /* optinfo_flags */
4533 TV_TREE_EH, /* tv_id */
4534 PROP_gimple_lcf, /* properties_required */
4535 0, /* properties_provided */
4536 0, /* properties_destroyed */
4537 0, /* todo_flags_start */
4538 0, /* todo_flags_finish */
4541 class pass_cleanup_eh : public gimple_opt_pass
4543 public:
4544 pass_cleanup_eh (gcc::context *ctxt)
4545 : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
4548 /* opt_pass methods: */
4549 opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }
4550 virtual bool gate (function *fun)
4552 return fun->eh != NULL && fun->eh->region_tree != NULL;
4555 virtual unsigned int execute (function *);
4557 }; // class pass_cleanup_eh
4559 unsigned int
4560 pass_cleanup_eh::execute (function *fun)
4562 int ret = execute_cleanup_eh_1 ();
4564 /* If the function no longer needs an EH personality routine,
4565 clear it. This exposes cross-language inlining opportunities
4566 and avoids references to a never defined personality routine. */
4567 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4568 && function_needs_eh_personality (fun) != eh_personality_lang)
4569 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4571 return ret;
4574 } // anon namespace
4576 gimple_opt_pass *
4577 make_pass_cleanup_eh (gcc::context *ctxt)
4579 return new pass_cleanup_eh (ctxt);
4582 /* Verify that BB, which contains STMT as its last statement, has precisely
4583 the edges that make_eh_edges would create. */
4585 DEBUG_FUNCTION bool
4586 verify_eh_edges (gimple stmt)
4588 basic_block bb = gimple_bb (stmt);
4589 eh_landing_pad lp = NULL;
4590 int lp_nr;
4591 edge_iterator ei;
4592 edge e, eh_edge;
4594 lp_nr = lookup_stmt_eh_lp (stmt);
4595 if (lp_nr > 0)
4596 lp = get_eh_landing_pad_from_number (lp_nr);
4598 eh_edge = NULL;
4599 FOR_EACH_EDGE (e, ei, bb->succs)
4601 if (e->flags & EDGE_EH)
4603 if (eh_edge)
4605 error ("BB %i has multiple EH edges", bb->index);
4606 return true;
4608 else
4609 eh_edge = e;
4613 if (lp == NULL)
4615 if (eh_edge)
4617 error ("BB %i can not throw but has an EH edge", bb->index);
4618 return true;
4620 return false;
4623 if (!stmt_could_throw_p (stmt))
4625 error ("BB %i last statement has incorrectly set lp", bb->index);
4626 return true;
4629 if (eh_edge == NULL)
4631 error ("BB %i is missing an EH edge", bb->index);
4632 return true;
4635 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4637 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4638 return true;
4641 return false;
4644 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
4646 DEBUG_FUNCTION bool
4647 verify_eh_dispatch_edge (gimple stmt)
4649 eh_region r;
4650 eh_catch c;
4651 basic_block src, dst;
4652 bool want_fallthru = true;
4653 edge_iterator ei;
4654 edge e, fall_edge;
4656 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4657 src = gimple_bb (stmt);
4659 FOR_EACH_EDGE (e, ei, src->succs)
4660 gcc_assert (e->aux == NULL);
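/* Mark each successor edge that the dispatch is expected to have by
   setting its AUX field; the loop further down then complains about any
   remaining unmarked, non-fallthru edge and clears the marks again.  */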
4662 switch (r->type)
4664 case ERT_TRY:
4665 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4667 dst = label_to_block (c->label);
4668 e = find_edge (src, dst);
4669 if (e == NULL)
4671 error ("BB %i is missing an edge", src->index);
4672 return true;
4674 e->aux = (void *)e;
4676 /* A catch-all handler doesn't have a fallthru. */
4677 if (c->type_list == NULL)
4679 want_fallthru = false;
4680 break;
4683 break;
4685 case ERT_ALLOWED_EXCEPTIONS:
4686 dst = label_to_block (r->u.allowed.label);
4687 e = find_edge (src, dst);
4688 if (e == NULL)
4690 error ("BB %i is missing an edge", src->index);
4691 return true;
4693 e->aux = (void *)e;
4694 break;
4696 default:
4697 gcc_unreachable ();
4700 fall_edge = NULL;
4701 FOR_EACH_EDGE (e, ei, src->succs)
4703 if (e->flags & EDGE_FALLTHRU)
4705 if (fall_edge != NULL)
4707 error ("BB %i too many fallthru edges", src->index);
4708 return true;
4710 fall_edge = e;
4712 else if (e->aux)
4713 e->aux = NULL;
4714 else
4716 error ("BB %i has incorrect edge", src->index);
4717 return true;
4720 if ((fall_edge != NULL) ^ want_fallthru)
4722 error ("BB %i has incorrect fallthru edge", src->index);
4723 return true;
4726 return false;