/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "expr.h"
#include "calls.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "pointer-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "target.h"
#include "cfgloop.h"
#include "gimple-low.h"
/* In some instances a tree and a gimple need to be stored in the same
   table, i.e. in hash tables.  This is a structure to do this.  */
typedef union {tree *tp; tree t; gimple g;} treemple;

/* Misc functions used in this file.  */
/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
75 /* Add statement T in function IFUN to landing pad NUM. */
77 static void
78 add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
80 struct throw_stmt_node *n;
81 void **slot;
83 gcc_assert (num != 0);
85 n = ggc_alloc_throw_stmt_node ();
86 n->stmt = t;
87 n->lp_nr = num;
89 if (!get_eh_throw_stmt_table (ifun))
90 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
91 struct_ptr_eq,
92 ggc_free));
94 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
95 gcc_assert (!*slot);
96 *slot = n;
/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

void
add_stmt_to_eh_lp (gimple t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}
/* Add statement T to the single EH landing pad in REGION.  */

static void
record_stmt_eh_region (eh_region region, gimple t)
{
  if (region == NULL)
    return;
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
	lp = gen_eh_landing_pad (region);
      else
	gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}
/* Remove statement T in function IFUN from its EH landing pad.  */

bool
remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
			 NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}
/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}
/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */

int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node *p, n;

  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
  return p ? p->lp_nr : 0;
}
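
/* Illustrative use (hypothetical caller, not from this file):

     int lp_nr = lookup_stmt_eh_lp_fn (cfun, stmt);
     if (lp_nr > 0)
       ...		// stmt may reach landing pad number lp_nr
     else if (lp_nr < 0)
       ...		// stmt is in MUST_NOT_THROW region -lp_nr
     else
       ...		// stmt is not recorded and cannot throw
*/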
/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}
/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gimple parent;
};
/* Hashtable helpers.  */

struct finally_tree_hasher : typed_free_remove <finally_tree_node>
{
  typedef finally_tree_node value_type;
  typedef finally_tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

inline hashval_t
finally_tree_hasher::hash (const value_type *v)
{
  return (intptr_t)v->child.t >> 4;
}

inline bool
finally_tree_hasher::equal (const value_type *v, const compare_type *c)
{
  return v->child.t == c->child.t;
}

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static hash_table <finally_tree_hasher> finally_tree;
static void
record_in_finally_tree (treemple child, gimple parent)
{
  struct finally_tree_node *n;
  finally_tree_node **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = finally_tree.find_slot (n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
static void
collect_finally_tree (gimple stmt, gimple region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gimple region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}
static void
collect_finally_tree (gimple stmt, gimple region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	{
	  temp.g = stmt;
	  record_in_finally_tree (temp, region);
	  collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	{
	  collect_finally_tree_1 (gimple_try_eval (stmt), region);
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (stmt), region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    case GIMPLE_EH_ELSE:
      collect_finally_tree_1 (gimple_eh_else_n_body (stmt), region);
      collect_finally_tree_1 (gimple_eh_else_e_body (stmt), region);
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = finally_tree.find (&n);
      if (!p)
	return true;
      start.g = p->parent;
    }
  while (start.g != target);

  return false;
}
/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straight-forward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;
/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */

struct goto_queue_node
{
  treemple stmt;
  location_t location;
  gimple_seq repl_stmt;
  gimple cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  int is_label;
};
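
/* For instance (an illustrative sketch, not from this file), in

	try { if (p) goto out; ... } finally { cleanup (); }
     out:

   the "goto out" escapes the GIMPLE_TRY_FINALLY, so it is queued here:
   REPL_STMT will hold the new code that first branches to the FINALLY
   block, and CONT_STMT holds the "goto out" that resumes the original
   control transfer afterwards.  */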
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;
  gimple top_p;

  /* While lowering, TOP_P is usually expanded into multiple statements;
     the following field stores them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  vec<tree> dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);

static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;
  void **slot;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
	if (tf->goto_queue[i].stmt.g == stmt.g)
	  return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
	{
	  slot = pointer_map_insert (tf->goto_queue_map,
				     tf->goto_queue[i].stmt.g);
	  gcc_assert (*slot == NULL);
	  *slot = &tf->goto_queue[i];
	}
    }

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
  if (slot != NULL)
    return ((struct goto_queue_node *) *slot)->repl_stmt;

  return NULL;
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
		      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
	{
	  gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
	  gsi_remove (gsi, false);
	  return;
	}
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler_ptr (stmt), tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
      break;
    case GIMPLE_EH_ELSE:
      replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (stmt), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}
/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (*seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}
/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (&eh_seq, tf);
}
/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple return.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
		      treemple new_stmt,
		      int index,
		      bool is_label,
		      location_t location)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->location = location;
  q->is_label = is_label;
}
/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
			    location_t location)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array.exists ())
    {
      tf->dest_array.create (10);
      tf->dest_array.quick_push (label);
      index = 0;
    }
  else
    {
      int n = tf->dest_array.length ();
      for (index = 0; index < n; ++index)
	if (tf->dest_array[index] == label)
	  break;
      if (index == n)
	tf->dest_array.safe_push (label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true, location);
}
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt),
				  EXPR_LOCATION (*new_stmt.tp));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt),
				  EXPR_LOCATION (*new_stmt.tp));
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
				  gimple_location (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
      break;

    default:
      gcc_unreachable ();
    }
}
#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

	int x;
	int foo (void)
	{
	  x = 0;
	  try {
	    return x;
	  } finally {
	    x++;
	  }
	}

     should return 0, not 1.  We don't have to do anything to make
     this happen because the return value has been placed in the
     RESULT_DECL already.  */

  q->cont_stmt = q->stmt.g;

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
		     struct leh_tf_state *tf)
{
  gimple x;

  gcc_assert (q->is_label);

  q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Emit a standard landing pad sequence into SEQ for REGION.  */

static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
  eh_landing_pad lp = region->landing_pads;
  gimple x;

  if (lp == NULL)
    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
}
/* Emit a RESX statement into SEQ for REGION.  */

static void
emit_resx (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  if (region->outer)
    record_stmt_eh_region (region->outer, x);
}
/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);
}
/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      if (region->type == ERT_MUST_NOT_THROW)
	break;
      region = region->outer;
      if (region == NULL)
	break;
    }
}
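
/* For example (illustrative indices, not from this file): if a call in
   cleanup region 5, nested in try region 3, nested in region 1, may
   throw, then bits 5, 3 and 1 all end up set in
   eh_region_may_contain_throw_map.  The walk stops early only at a
   MUST_NOT_THROW region, past which nothing propagates.  */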
/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

static inline bool
eh_region_may_contain_throw (eh_region r)
{
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
}
/* We want to transform
	try { body; } catch { stuff; }
   to
	normal_sequence:
	  body;
	  over:
	eh_sequence:
	  landing_pad:
	  stuff;
	  goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gimple tp, eh_region region, tree over)
{
  gimple x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
	over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_set_location (x, loc);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
			     location_t loc)
{
  gimple region = NULL;
  gimple_seq new_seq;
  gimple_stmt_iterator gsi;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
	{
	  tree block = gimple_block (stmt);
	  gimple_set_location (stmt, loc);
	  gimple_set_block (stmt, block);
	}
    }

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
	{
	  temp.t = label;
	  record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
	}
    }
  return label;
}
/* A subroutine of lower_try_finally.  If FINALLY consists of a
   GIMPLE_EH_ELSE node, return it.  */

static inline gimple
get_eh_else (gimple_seq finally)
{
  gimple x = gimple_seq_first_stmt (finally);
  if (gimple_code (x) == GIMPLE_EH_ELSE)
    {
      gcc_assert (gimple_seq_singleton_p (finally));
      return x;
    }
  return NULL;
}
/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x, eh_else;

  /* First check for nothing to do.  */
  if (lang_hooks.eh_protect_cleanup_actions == NULL)
    return;
  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)
    return;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  If we've got an EH_ELSE, extract it now.  */
  if (eh_else)
    {
      finally = gimple_eh_else_e_body (eh_else);
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
    }
  else if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state,
	gimple_location (tf->try_finally_expr));
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
			GIMPLE_TRY_CATCH);
  finally = lower_eh_must_not_throw (outer_state, x);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
			      struct leh_tf_state *tf)
{
  tree lab;
  gimple x, eh_else;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
	{
	  finally = gimple_eh_else_e_body (eh_else);
	  lower_eh_constructs_1 (state, &finally);

	  emit_post_landing_pad (&eh_seq, tf->region);
	  gimple_seq_add_seq (&eh_seq, finally);
	}
    }
  else
    {
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
	{
	  emit_post_landing_pad (&eh_seq, tf->region);

	  x = gimple_build_goto (lab);
	  gimple_set_location (x, gimple_location (tf->try_finally_expr));
	  gimple_seq_add_stmt (&eh_seq, x);
	}
    }
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  gimple x;
  gimple_seq finally;
  gimple_stmt_iterator gsi;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  x = get_eh_else (finally);
  if (x)
    {
      if (tf->may_throw)
	finally = gimple_eh_else_e_body (x);
      else
	finally = gimple_eh_else_n_body (x);
    }

  lower_eh_constructs_1 (state, &finally);

  for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
	{
	  tree block = gimple_block (stmt);
	  gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
	  gimple_set_block (stmt, block);
	}
    }

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
	 the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (tf->dest_array[0] == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
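
/* For instance (an illustrative sketch, not from this file), with two
   escaping destinations

	try { if (p) goto A; else goto B; } finally { cleanup (); }

   the copy strategy emits one copy of "cleanup ();" followed by
   "goto A;" and a second copy followed by "goto B;", trading code size
   for straight-line control flow.  */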
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple x, eh_else;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);

  /* Notice EH_ELSE, and simplify some of the remaining code
     by considering FINALLY to be the normal return path only.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    finally = gimple_eh_else_n_body (eh_else);

  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      /* We don't need to copy the EH path of EH_ELSE,
	 since it is only emitted once.  */
      if (eh_else)
	seq = gimple_eh_else_e_body (eh_else);
      else
	seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      int return_index, index;
      struct labels_s
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      return_index = tf->dest_array.length ();
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  if (!labels[index].q)
	    labels[index].q = q;
	}

      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label
	    = create_artificial_label (tf_loc);

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL);
	  else
	    do_goto_redirection (q, lab, NULL, tf);

	  x = gimple_build_label (lab);
	  gimple_seq_add_stmt (&new_stmt, x);

	  seq = lower_try_finally_dup_block (finally, state, q->location);
	  lower_eh_constructs_1 (state, &seq);
	  gimple_seq_add_seq (&new_stmt, seq);

	  gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL);
	  else
	    do_goto_redirection (q, lab, NULL, tf);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
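
/* Continuing the illustrative example above (not from this file), the
   switch strategy instead numbers the incoming edges:

	if (p) { finally_tmp = 0; goto finally_lab; }
	else   { finally_tmp = 1; goto finally_lab; }
	finally_lab:
	cleanup ();
	switch (finally_tmp)
	  {
	  case 0: goto A;
	  case 1: goto B;
	  }

   so only a single copy of the FINALLY block is emitted.  */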
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  vec<tree> case_label_vec;
  gimple_seq switch_body = NULL;
  gimple x, eh_else;
  tree tmp;
  gimple switch_stmt;
  gimple_seq finally;
  struct pointer_map_t *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Mash the TRY block to the head of the chain.  */
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  x = gimple_seq_last_stmt (finally);
  finally_loc = x ? gimple_location (x) : tf_loc;

  /* Prepare for switch statement generation.  */
  nlabels = tf->dest_array.length ();
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + (tf->may_throw && !eh_else);
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use vec::quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec.create (ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp,
			       build_int_cst (integer_type_node,
					      fallthru_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
				    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&switch_body, x);
    }

  /* For EH_ELSE, emit the exception path (plus resx) now, then
     subsequently we only need consider the normal path.  */
  if (eh_else)
    {
      if (tf->may_throw)
	{
	  finally = gimple_eh_else_e_body (eh_else);
	  lower_eh_constructs_1 (state, &finally);

	  emit_post_landing_pad (&eh_seq, tf->region);
	  gimple_seq_add_seq (&eh_seq, finally);
	  emit_resx (&eh_seq, tf->region);
	}

      finally = gimple_eh_else_n_body (eh_else);
    }
  else if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
			       build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
				    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  lower_eh_constructs_1 (state, &finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod = NULL;
      int switch_id;
      unsigned int case_index;

      if (q->index < 0)
	{
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (integer_type_node,
						  return_index));
	  gimple_seq_add_stmt (&mod, x);
	  do_return_redirection (q, finally_label, mod);
	  switch_id = return_index;
	}
      else
	{
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (integer_type_node, q->index));
	  gimple_seq_add_stmt (&mod, x);
	  do_goto_redirection (q, finally_label, mod, tf);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
	{
	  tree case_lab;
	  void **slot;
	  tmp = build_int_cst (integer_type_node, switch_id);
	  case_lab = build_case_label (tmp, NULL,
				       create_artificial_label (tf_loc));
	  /* We store the cont_stmt in the pointer map, so that we can recover
	     it in the loop below.  */
	  if (!cont_map)
	    cont_map = pointer_map_create ();
	  slot = pointer_map_insert (cont_map, case_lab);
	  *slot = q->cont_stmt;
	  case_label_vec.quick_push (case_lab);
	}
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      gimple cont_stmt;
      void **slot;

      last_case = case_label_vec[j];

      gcc_assert (last_case);
      gcc_assert (cont_map);

      slot = pointer_map_contains (cont_map, last_case);
      gcc_assert (slot);
      cont_stmt = *(gimple *) slot;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    pointer_map_destroy (cont_map);

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch (finally_tmp, last_case,
				     case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
static bool
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
{
  int f_estimate, sw_estimate;
  gimple eh_else;

  /* If there's an EH_ELSE involved, the exception path is separate
     and really doesn't come into play for this computation.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      ndests -= may_throw;
      finally = gimple_eh_else_n_body (eh_else);
    }

  if (!optimize)
    {
      gimple_stmt_iterator gsi;

      if (ndests == 1)
	return true;

      for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
	    return false;
	}
      return true;
    }

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
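
/* Worked example (illustrative numbers): a FINALLY block estimated at
   8 insns with 3 destinations gives f_estimate = (8 + 1) * 3 = 27 and
   sw_estimate = 10 + 2 * 3 = 16.  When optimizing for size, 27 < 16
   fails, so the switch form is used; at -O2 the "f_estimate < 100"
   escape still chooses the copy form, favoring straight-line code
   over size.  */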
/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */

static bool
cleanup_is_dead_in (eh_region reg)
{
  while (reg && reg->type == ERT_CLEANUP)
    reg = reg->outer;
  return (reg && reg->type == ERT_MUST_NOT_THROW);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static gimple_seq
lower_try_finally (struct leh_state *state, gimple tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
    {
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
    }
  else
    {
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;
    }

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  old_eh_seq = eh_seq;
  eh_seq = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (this_tf.region)
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = this_tf.dest_array.length ();
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, this_tf.may_throw,
				    gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  this_tf.dest_array.release ();
  free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */
  if (old_eh_seq)
    {
      if (eh_seq == NULL)
	eh_seq = old_eh_seq;
      else
	{
	  gimple_seq new_eh_seq = eh_seq;
	  eh_seq = old_eh_seq;
	  gimple_seq_add_seq (&eh_seq, new_eh_seq);
	}
    }

  return this_tf.top_p_seq;
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static gimple_seq
lower_catch (struct leh_state *state, gimple tp)
{
  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;
  tree out_label;
  gimple_seq new_seq, cleanup;
  gimple x;
  location_t try_catch_loc = gimple_location (tp);

  if (flag_exceptions)
    {
      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  emit_eh_dispatch (&new_seq, try_region);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = try_region;

  out_label = NULL;
  cleanup = gimple_try_cleanup (tp);
  for (gsi = gsi_start (cleanup);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      eh_catch c;
      gimple gcatch;
      gimple_seq handler;

      gcatch = gsi_stmt (gsi);
      c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));

      handler = gimple_catch_handler (gcatch);
      lower_eh_constructs_1 (&this_state, &handler);

      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      if (gimple_seq_may_fallthru (new_seq))
	{
	  if (!out_label)
	    out_label = create_artificial_label (try_catch_loc);

	  x = gimple_build_goto (out_label);
	  gimple_seq_add_stmt (&new_seq, x);
	}
      if (!c->type_list)
	break;
    }

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, try_region, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static gimple_seq
lower_eh_filter (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  gimple inner, x;
  gimple_seq new_seq;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)
    {
      this_region = gen_eh_region_allowed (state->cur_region,
					   gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  emit_eh_dispatch (&new_seq, this_region);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;

  if (flag_exceptions)
    {
      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
	= gimple_eh_must_not_throw_fndecl (inner);
      this_region->u.must_not_throw.failure_loc
	= LOCATION_LOCUS (gimple_location (tp));

      /* In order to get mangling applied to this decl, we must mark it
	 used now.  Otherwise, pass_ipa_free_lang_data won't think it
	 needs to happen.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  return gimple_try_eval (tp);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static gimple_seq
lower_cleanup (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;
  gimple_seq result;
  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);

  if (flag_exceptions && !cleanup_dead)
    {
      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
      result = frob_into_branch_around (tp, this_region,
					fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
	{
	  gimple x = gimple_build_label (fake_tf.fallthru_label);
	  gimple_seq_add_stmt (&result, x);
	}
    }
  return result;
}
1960 /* Main loop for lowering eh constructs. Also moves gsi to the next
1961 statement. */
1963 static void
1964 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1966 gimple_seq replace;
1967 gimple x;
1968 gimple stmt = gsi_stmt (*gsi);
1970 switch (gimple_code (stmt))
1972 case GIMPLE_CALL:
1974 tree fndecl = gimple_call_fndecl (stmt);
1975 tree rhs, lhs;
1977 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1978 switch (DECL_FUNCTION_CODE (fndecl))
1980 case BUILT_IN_EH_POINTER:
1981 /* The front end may have generated a call to
1982 __builtin_eh_pointer (0) within a catch region. Replace
1983 this zero argument with the current catch region number. */
1984 if (state->ehp_region)
1986 tree nr = build_int_cst (integer_type_node,
1987 state->ehp_region->index);
1988 gimple_call_set_arg (stmt, 0, nr);
1990 else
1992 /* The user has done something silly. Remove it. */
1993 rhs = null_pointer_node;
1994 goto do_replace;
1996 break;
1998 case BUILT_IN_EH_FILTER:
1999 /* ??? This should never appear, but since it's a builtin it
2000 is open to abuse by users. Just remove it and
2001 replace the use with the arbitrary value zero. */
2002 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
2003 do_replace:
2004 lhs = gimple_call_lhs (stmt);
2005 x = gimple_build_assign (lhs, rhs);
2006 gsi_insert_before (gsi, x, GSI_SAME_STMT);
2007 /* FALLTHRU */
2009 case BUILT_IN_EH_COPY_VALUES:
2010 /* Likewise this should not appear. Remove it. */
2011 gsi_remove (gsi, true);
2012 return;
2014 default:
2015 break;
2018 /* FALLTHRU */
2020 case GIMPLE_ASSIGN:
2021 /* If the stmt can throw, use a new temporary for the assignment
2022 to a LHS. This makes sure the old value of the LHS is
2023 available on the EH edge. Only do so for statements that
2024 potentially fall through (e.g. no noreturn calls), otherwise
2025 this new assignment might create fake fallthru regions. */
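/* As a hypothetical sketch of the transformation: a possibly-throwing
   statement such as
     x = a / b;
   becomes
     tmp = a / b;
     x = tmp;
   so that X still holds its previous value on the EH edge if the
   division throws.  */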
2026 if (stmt_could_throw_p (stmt)
2027 && gimple_has_lhs (stmt)
2028 && gimple_stmt_may_fallthru (stmt)
2029 && !tree_could_throw_p (gimple_get_lhs (stmt))
2030 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
2032 tree lhs = gimple_get_lhs (stmt);
2033 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
2034 gimple s = gimple_build_assign (lhs, tmp);
2035 gimple_set_location (s, gimple_location (stmt));
2036 gimple_set_block (s, gimple_block (stmt));
2037 gimple_set_lhs (stmt, tmp);
2038 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
2039 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
2040 DECL_GIMPLE_REG_P (tmp) = 1;
2041 gsi_insert_after (gsi, s, GSI_SAME_STMT);
2043 /* Look for things that can throw exceptions, and record them. */
2044 if (state->cur_region && stmt_could_throw_p (stmt))
2046 record_stmt_eh_region (state->cur_region, stmt);
2047 note_eh_region_may_contain_throw (state->cur_region);
2049 break;
2051 case GIMPLE_COND:
2052 case GIMPLE_GOTO:
2053 case GIMPLE_RETURN:
2054 maybe_record_in_goto_queue (state, stmt);
2055 break;
2057 case GIMPLE_SWITCH:
2058 verify_norecord_switch_expr (state, stmt);
2059 break;
2061 case GIMPLE_TRY:
2062 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
2063 replace = lower_try_finally (state, stmt);
2064 else
2066 x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
2067 if (!x)
2069 replace = gimple_try_eval (stmt);
2070 lower_eh_constructs_1 (state, &replace);
2072 else
2073 switch (gimple_code (x))
2075 case GIMPLE_CATCH:
2076 replace = lower_catch (state, stmt);
2077 break;
2078 case GIMPLE_EH_FILTER:
2079 replace = lower_eh_filter (state, stmt);
2080 break;
2081 case GIMPLE_EH_MUST_NOT_THROW:
2082 replace = lower_eh_must_not_throw (state, stmt);
2083 break;
2084 case GIMPLE_EH_ELSE:
2085 /* This code is only valid with GIMPLE_TRY_FINALLY. */
2086 gcc_unreachable ();
2087 default:
2088 replace = lower_cleanup (state, stmt);
2089 break;
2093 /* Remove the old stmt and insert the transformed sequence
2094 instead. */
2095 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
2096 gsi_remove (gsi, true);
2098 /* Return since we don't want gsi_next (). */
2099 return;
2101 case GIMPLE_EH_ELSE:
2102 /* We should be eliminating this in lower_try_finally et al. */
2103 gcc_unreachable ();
2105 default:
2106 /* A type, a decl, or some kind of statement that we're not
2107 interested in. Don't walk them. */
2108 break;
2111 gsi_next (gsi);
2114 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
2116 static void
2117 lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
2119 gimple_stmt_iterator gsi;
2120 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
2121 lower_eh_constructs_2 (state, &gsi);
2124 namespace {
2126 const pass_data pass_data_lower_eh =
2128 GIMPLE_PASS, /* type */
2129 "eh", /* name */
2130 OPTGROUP_NONE, /* optinfo_flags */
2131 true, /* has_execute */
2132 TV_TREE_EH, /* tv_id */
2133 PROP_gimple_lcf, /* properties_required */
2134 PROP_gimple_leh, /* properties_provided */
2135 0, /* properties_destroyed */
2136 0, /* todo_flags_start */
2137 0, /* todo_flags_finish */
2140 class pass_lower_eh : public gimple_opt_pass
2142 public:
2143 pass_lower_eh (gcc::context *ctxt)
2144 : gimple_opt_pass (pass_data_lower_eh, ctxt)
2147 /* opt_pass methods: */
2148 virtual unsigned int execute (function *);
2150 }; // class pass_lower_eh
2152 unsigned int
2153 pass_lower_eh::execute (function *fun)
2155 struct leh_state null_state;
2156 gimple_seq bodyp;
2158 bodyp = gimple_body (current_function_decl);
2159 if (bodyp == NULL)
2160 return 0;
2162 finally_tree.create (31);
2163 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
2164 memset (&null_state, 0, sizeof (null_state));
2166 collect_finally_tree_1 (bodyp, NULL);
2167 lower_eh_constructs_1 (&null_state, &bodyp);
2168 gimple_set_body (current_function_decl, bodyp);
2170 /* We assume there's a return statement, or something, at the end of
2171 the function, and thus plopping the EH sequence afterward won't
2172 change anything. */
2173 gcc_assert (!gimple_seq_may_fallthru (bodyp));
2174 gimple_seq_add_seq (&bodyp, eh_seq);
2176 /* We assume that since BODYP already existed, adding EH_SEQ to it
2177 didn't change its value, and we don't have to re-set the function. */
2178 gcc_assert (bodyp == gimple_body (current_function_decl));
2180 finally_tree.dispose ();
2181 BITMAP_FREE (eh_region_may_contain_throw_map);
2182 eh_seq = NULL;
2184 /* If this function needs a language specific EH personality routine
2185 and the frontend didn't already set one, do so now. */
2186 if (function_needs_eh_personality (fun) == eh_personality_lang
2187 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2188 DECL_FUNCTION_PERSONALITY (current_function_decl)
2189 = lang_hooks.eh_personality ();
2191 return 0;
2194 } // anon namespace
2196 gimple_opt_pass *
2197 make_pass_lower_eh (gcc::context *ctxt)
2199 return new pass_lower_eh (ctxt);
2202 /* Create the multiple edges from an EH_DISPATCH statement to all of
2203 the possible handlers for its EH region. Return true if there's
2204 no fallthru edge; false if there is. */
2206 bool
2207 make_eh_dispatch_edges (gimple stmt)
2209 eh_region r;
2210 eh_catch c;
2211 basic_block src, dst;
2213 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2214 src = gimple_bb (stmt);
2216 switch (r->type)
2218 case ERT_TRY:
2219 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2221 dst = label_to_block (c->label);
2222 make_edge (src, dst, 0);
2224 /* A catch-all handler doesn't have a fallthru. */
2225 if (c->type_list == NULL)
2226 return false;
2228 break;
2230 case ERT_ALLOWED_EXCEPTIONS:
2231 dst = label_to_block (r->u.allowed.label);
2232 make_edge (src, dst, 0);
2233 break;
2235 default:
2236 gcc_unreachable ();
2239 return true;
2242 /* Create the single EH edge from STMT to its nearest landing pad,
2243 if there is such a landing pad within the current function. */
2245 void
2246 make_eh_edges (gimple stmt)
2248 basic_block src, dst;
2249 eh_landing_pad lp;
2250 int lp_nr;
2252 lp_nr = lookup_stmt_eh_lp (stmt);
2253 if (lp_nr <= 0)
2254 return;
2256 lp = get_eh_landing_pad_from_number (lp_nr);
2257 gcc_assert (lp != NULL);
2259 src = gimple_bb (stmt);
2260 dst = label_to_block (lp->post_landing_pad);
2261 make_edge (src, dst, EDGE_EH);
2264 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2265 do not actually perform the final edge redirection.
2267 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2268 we intend to change the destination EH region as well; this means
2269 EH_LANDING_PAD_NR must already be set on the destination block label.
2270 If false, we're being called from generic cfg manipulation code and we
2271 should preserve our place within the region tree. */
2273 static void
2274 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2276 eh_landing_pad old_lp, new_lp;
2277 basic_block old_bb;
2278 gimple throw_stmt;
2279 int old_lp_nr, new_lp_nr;
2280 tree old_label, new_label;
2281 edge_iterator ei;
2282 edge e;
2284 old_bb = edge_in->dest;
2285 old_label = gimple_block_label (old_bb);
2286 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2287 gcc_assert (old_lp_nr > 0);
2288 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2290 throw_stmt = last_stmt (edge_in->src);
2291 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2293 new_label = gimple_block_label (new_bb);
2295 /* Look for an existing region that might be using NEW_BB already. */
2296 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2297 if (new_lp_nr)
2299 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2300 gcc_assert (new_lp);
2302 /* Unless CHANGE_REGION is true, the new and old landing pad
2303 had better be associated with the same EH region. */
2304 gcc_assert (change_region || new_lp->region == old_lp->region);
2306 else
2308 new_lp = NULL;
2309 gcc_assert (!change_region);
2312 /* Notice when we redirect the last EH edge away from OLD_BB. */
2313 FOR_EACH_EDGE (e, ei, old_bb->preds)
2314 if (e != edge_in && (e->flags & EDGE_EH))
2315 break;
2317 if (new_lp)
2319 /* NEW_LP already exists. If there are still edges into OLD_LP,
2320 there's nothing to do with the EH tree. If there are no more
2321 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2322 If CHANGE_REGION is true, then our caller is expecting to remove
2323 the landing pad. */
2324 if (e == NULL && !change_region)
2325 remove_eh_landing_pad (old_lp);
2327 else
2329 /* No correct landing pad exists. If there are no more edges
2330 into OLD_LP, then we can simply re-use the existing landing pad.
2331 Otherwise, we have to create a new landing pad. */
2332 if (e == NULL)
2334 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2335 new_lp = old_lp;
2337 else
2338 new_lp = gen_eh_landing_pad (old_lp->region);
2339 new_lp->post_landing_pad = new_label;
2340 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2343 /* Maybe move the throwing statement to the new region. */
2344 if (old_lp != new_lp)
2346 remove_stmt_from_eh_lp (throw_stmt);
2347 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2351 /* Redirect EH edge E to NEW_BB. */
2353 edge
2354 redirect_eh_edge (edge edge_in, basic_block new_bb)
2356 redirect_eh_edge_1 (edge_in, new_bb, false);
2357 return ssa_redirect_edge (edge_in, new_bb);
2360 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2361 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2362 The actual edge update will happen in the caller. */
2364 void
2365 redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
2367 tree new_lab = gimple_block_label (new_bb);
2368 bool any_changed = false;
2369 basic_block old_bb;
2370 eh_region r;
2371 eh_catch c;
2373 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2374 switch (r->type)
2376 case ERT_TRY:
2377 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2379 old_bb = label_to_block (c->label);
2380 if (old_bb == e->dest)
2382 c->label = new_lab;
2383 any_changed = true;
2386 break;
2388 case ERT_ALLOWED_EXCEPTIONS:
2389 old_bb = label_to_block (r->u.allowed.label);
2390 gcc_assert (old_bb == e->dest);
2391 r->u.allowed.label = new_lab;
2392 any_changed = true;
2393 break;
2395 default:
2396 gcc_unreachable ();
2399 gcc_assert (any_changed);
2402 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2404 bool
2405 operation_could_trap_helper_p (enum tree_code op,
2406 bool fp_operation,
2407 bool honor_trapv,
2408 bool honor_nans,
2409 bool honor_snans,
2410 tree divisor,
2411 bool *handled)
2413 *handled = true;
2414 switch (op)
2416 case TRUNC_DIV_EXPR:
2417 case CEIL_DIV_EXPR:
2418 case FLOOR_DIV_EXPR:
2419 case ROUND_DIV_EXPR:
2420 case EXACT_DIV_EXPR:
2421 case CEIL_MOD_EXPR:
2422 case FLOOR_MOD_EXPR:
2423 case ROUND_MOD_EXPR:
2424 case TRUNC_MOD_EXPR:
2425 case RDIV_EXPR:
2426 if (honor_snans || honor_trapv)
2427 return true;
2428 if (fp_operation)
2429 return flag_trapping_math;
2430 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2431 return true;
2432 return false;
2434 case LT_EXPR:
2435 case LE_EXPR:
2436 case GT_EXPR:
2437 case GE_EXPR:
2438 case LTGT_EXPR:
2439 /* Some floating point comparisons may trap. */
2440 return honor_nans;
2442 case EQ_EXPR:
2443 case NE_EXPR:
2444 case UNORDERED_EXPR:
2445 case ORDERED_EXPR:
2446 case UNLT_EXPR:
2447 case UNLE_EXPR:
2448 case UNGT_EXPR:
2449 case UNGE_EXPR:
2450 case UNEQ_EXPR:
2451 return honor_snans;
2453 case CONVERT_EXPR:
2454 case FIX_TRUNC_EXPR:
2455 /* Conversion of floating point might trap. */
2456 return honor_nans;
2458 case NEGATE_EXPR:
2459 case ABS_EXPR:
2460 case CONJ_EXPR:
2461 /* These operations don't trap with floating point. */
2462 if (honor_trapv)
2463 return true;
2464 return false;
2466 case PLUS_EXPR:
2467 case MINUS_EXPR:
2468 case MULT_EXPR:
2469 /* Any floating arithmetic may trap. */
2470 if (fp_operation && flag_trapping_math)
2471 return true;
2472 if (honor_trapv)
2473 return true;
2474 return false;
2476 case COMPLEX_EXPR:
2477 case CONSTRUCTOR:
2478 /* Constructing an object cannot trap. */
2479 return false;
2481 default:
2482 /* Any floating arithmetic may trap. */
2483 if (fp_operation && flag_trapping_math)
2484 return true;
2486 *handled = false;
2487 return false;
2491 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2492 on floating-point values. HONOR_TRAPV is true if OP is applied on integer
2493 type operands that may trap. If OP is a division operator, DIVISOR contains
2494 the value of the divisor. */
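/* For instance, integer division by a divisor that is zero or not
   known to be constant is reported as potentially trapping:
     operation_could_trap_p (TRUNC_DIV_EXPR, false, false, integer_zero_node)
   returns true, while the same query with a nonzero constant divisor
   returns false.  */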
2496 bool
2497 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2498 tree divisor)
2500 bool honor_nans = (fp_operation && flag_trapping_math
2501 && !flag_finite_math_only);
2502 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2503 bool handled;
2505 if (TREE_CODE_CLASS (op) != tcc_comparison
2506 && TREE_CODE_CLASS (op) != tcc_unary
2507 && TREE_CODE_CLASS (op) != tcc_binary)
2508 return false;
2510 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2511 honor_nans, honor_snans, divisor,
2512 &handled);
2516 /* Returns true if it is possible to prove that the index of
2517 an array access REF (an ARRAY_REF expression) falls into the
2518 array bounds. */
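/* For example, with int a[10], the access a[4] has a constant index
   inside the constant bounds [0, 9] and is provably in range, whereas
   a[i] with a non-constant I cannot be proven in range and makes this
   return false.  */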
2520 static bool
2521 in_array_bounds_p (tree ref)
2523 tree idx = TREE_OPERAND (ref, 1);
2524 tree min, max;
2526 if (TREE_CODE (idx) != INTEGER_CST)
2527 return false;
2529 min = array_ref_low_bound (ref);
2530 max = array_ref_up_bound (ref);
2531 if (!min
2532 || !max
2533 || TREE_CODE (min) != INTEGER_CST
2534 || TREE_CODE (max) != INTEGER_CST)
2535 return false;
2537 if (tree_int_cst_lt (idx, min)
2538 || tree_int_cst_lt (max, idx))
2539 return false;
2541 return true;
2544 /* Returns true if it is possible to prove that the range of
2545 an array access REF (an ARRAY_RANGE_REF expression) falls
2546 into the array bounds. */
2548 static bool
2549 range_in_array_bounds_p (tree ref)
2551 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
2552 tree range_min, range_max, min, max;
2554 range_min = TYPE_MIN_VALUE (domain_type);
2555 range_max = TYPE_MAX_VALUE (domain_type);
2556 if (!range_min
2557 || !range_max
2558 || TREE_CODE (range_min) != INTEGER_CST
2559 || TREE_CODE (range_max) != INTEGER_CST)
2560 return false;
2562 min = array_ref_low_bound (ref);
2563 max = array_ref_up_bound (ref);
2564 if (!min
2565 || !max
2566 || TREE_CODE (min) != INTEGER_CST
2567 || TREE_CODE (max) != INTEGER_CST)
2568 return false;
2570 if (tree_int_cst_lt (range_min, min)
2571 || tree_int_cst_lt (max, range_max))
2572 return false;
2574 return true;
2577 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2578 location or floating point arithmetic. Cf. the rtl version, may_trap_p.
2579 This routine expects only GIMPLE lhs or rhs input. */
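/* Typical trapping trees handled below include dereferences not marked
   TREE_THIS_NOTRAP, ARRAY_REFs whose index cannot be proven in bounds,
   and (via operation_could_trap_p) floating-point arithmetic when
   -ftrapping-math is in effect.  */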
2581 bool
2582 tree_could_trap_p (tree expr)
2584 enum tree_code code;
2585 bool fp_operation = false;
2586 bool honor_trapv = false;
2587 tree t, base, div = NULL_TREE;
2589 if (!expr)
2590 return false;
2592 code = TREE_CODE (expr);
2593 t = TREE_TYPE (expr);
2595 if (t)
2597 if (COMPARISON_CLASS_P (expr))
2598 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2599 else
2600 fp_operation = FLOAT_TYPE_P (t);
2601 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2604 if (TREE_CODE_CLASS (code) == tcc_binary)
2605 div = TREE_OPERAND (expr, 1);
2606 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2607 return true;
2609 restart:
2610 switch (code)
2612 case COMPONENT_REF:
2613 case REALPART_EXPR:
2614 case IMAGPART_EXPR:
2615 case BIT_FIELD_REF:
2616 case VIEW_CONVERT_EXPR:
2617 case WITH_SIZE_EXPR:
2618 expr = TREE_OPERAND (expr, 0);
2619 code = TREE_CODE (expr);
2620 goto restart;
2622 case ARRAY_RANGE_REF:
2623 base = TREE_OPERAND (expr, 0);
2624 if (tree_could_trap_p (base))
2625 return true;
2626 if (TREE_THIS_NOTRAP (expr))
2627 return false;
2628 return !range_in_array_bounds_p (expr);
2630 case ARRAY_REF:
2631 base = TREE_OPERAND (expr, 0);
2632 if (tree_could_trap_p (base))
2633 return true;
2634 if (TREE_THIS_NOTRAP (expr))
2635 return false;
2636 return !in_array_bounds_p (expr);
2638 case TARGET_MEM_REF:
2639 case MEM_REF:
2640 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
2641 && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
2642 return true;
2643 if (TREE_THIS_NOTRAP (expr))
2644 return false;
2645 /* We cannot prove that the access is in-bounds when we have
2646 variable-index TARGET_MEM_REFs. */
2647 if (code == TARGET_MEM_REF
2648 && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
2649 return true;
2650 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2652 tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2653 double_int off = mem_ref_offset (expr);
2654 if (off.is_negative ())
2655 return true;
2656 if (TREE_CODE (base) == STRING_CST)
2657 return double_int::from_uhwi (TREE_STRING_LENGTH (base)).ule (off);
2658 else if (DECL_SIZE_UNIT (base) == NULL_TREE
2659 || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST
2660 || tree_to_double_int (DECL_SIZE_UNIT (base)).ule (off))
2661 return true;
2662 /* Now we are sure the first byte of the access is inside
2663 the object. */
2664 return false;
2666 return true;
2668 case INDIRECT_REF:
2669 return !TREE_THIS_NOTRAP (expr);
2671 case ASM_EXPR:
2672 return TREE_THIS_VOLATILE (expr);
2674 case CALL_EXPR:
2675 t = get_callee_fndecl (expr);
2676 /* Assume that calls to weak functions may trap. */
2677 if (!t || !DECL_P (t))
2678 return true;
2679 if (DECL_WEAK (t))
2680 return tree_could_trap_p (t);
2681 return false;
2683 case FUNCTION_DECL:
2684 /* Assume that accesses to weak functions may trap, unless we know
2685 they are certainly defined in the current TU or in some other
2686 LTO partition. */
2687 if (DECL_WEAK (expr) && !DECL_COMDAT (expr))
2689 struct cgraph_node *node;
2690 if (!DECL_EXTERNAL (expr))
2691 return false;
2692 node = cgraph_function_node (cgraph_get_node (expr), NULL);
2693 if (node && node->in_other_partition)
2694 return false;
2695 return true;
2697 return false;
2699 case VAR_DECL:
2700 /* Assume that accesses to weak vars may trap, unless we know
2701 they are certainly defined in the current TU or in some other
2702 LTO partition. */
2703 if (DECL_WEAK (expr) && !DECL_COMDAT (expr))
2705 varpool_node *node;
2706 if (!DECL_EXTERNAL (expr))
2707 return false;
2708 node = varpool_variable_node (varpool_get_node (expr), NULL);
2709 if (node && node->in_other_partition)
2710 return false;
2711 return true;
2713 return false;
2715 default:
2716 return false;
2721 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2722 an assignment or a conditional) may throw. */
2724 static bool
2725 stmt_could_throw_1_p (gimple stmt)
2727 enum tree_code code = gimple_expr_code (stmt);
2728 bool honor_nans = false;
2729 bool honor_snans = false;
2730 bool fp_operation = false;
2731 bool honor_trapv = false;
2732 tree t;
2733 size_t i;
2734 bool handled, ret;
2736 if (TREE_CODE_CLASS (code) == tcc_comparison
2737 || TREE_CODE_CLASS (code) == tcc_unary
2738 || TREE_CODE_CLASS (code) == tcc_binary)
2740 if (is_gimple_assign (stmt)
2741 && TREE_CODE_CLASS (code) == tcc_comparison)
2742 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2743 else if (gimple_code (stmt) == GIMPLE_COND)
2744 t = TREE_TYPE (gimple_cond_lhs (stmt));
2745 else
2746 t = gimple_expr_type (stmt);
2747 fp_operation = FLOAT_TYPE_P (t);
2748 if (fp_operation)
2750 honor_nans = flag_trapping_math && !flag_finite_math_only;
2751 honor_snans = flag_signaling_nans != 0;
2753 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2754 honor_trapv = true;
2757 /* Check if the main expression may trap. */
2758 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2759 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2760 honor_nans, honor_snans, t,
2761 &handled);
2762 if (handled)
2763 return ret;
2765 /* If the expression does not trap, see if any of the individual operands may
2766 trap. */
2767 for (i = 0; i < gimple_num_ops (stmt); i++)
2768 if (tree_could_trap_p (gimple_op (stmt, i)))
2769 return true;
2771 return false;
2775 /* Return true if statement STMT could throw an exception. */
2777 bool
2778 stmt_could_throw_p (gimple stmt)
2780 if (!flag_exceptions)
2781 return false;
2783 /* The only statements that can throw an exception are assignments,
2784 conditionals, calls, resx, and asms. */
2785 switch (gimple_code (stmt))
2787 case GIMPLE_RESX:
2788 return true;
2790 case GIMPLE_CALL:
2791 return !gimple_call_nothrow_p (stmt);
2793 case GIMPLE_ASSIGN:
2794 case GIMPLE_COND:
2795 if (!cfun->can_throw_non_call_exceptions)
2796 return false;
2797 return stmt_could_throw_1_p (stmt);
2799 case GIMPLE_ASM:
2800 if (!cfun->can_throw_non_call_exceptions)
2801 return false;
2802 return gimple_asm_volatile_p (stmt);
2804 default:
2805 return false;
2810 /* Return true if expression T could throw an exception. */
2812 bool
2813 tree_could_throw_p (tree t)
2815 if (!flag_exceptions)
2816 return false;
2817 if (TREE_CODE (t) == MODIFY_EXPR)
2819 if (cfun->can_throw_non_call_exceptions
2820 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2821 return true;
2822 t = TREE_OPERAND (t, 1);
2825 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2826 t = TREE_OPERAND (t, 0);
2827 if (TREE_CODE (t) == CALL_EXPR)
2828 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2829 if (cfun->can_throw_non_call_exceptions)
2830 return tree_could_trap_p (t);
2831 return false;
2834 /* Return true if STMT can throw an exception that is not caught within
2835 the current function (CFUN). */
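/* The landing pad number returned by lookup_stmt_eh_lp encodes three
   cases: zero means the statement has no EH handler in this function
   (a thrown exception escapes), positive numbers denote real landing
   pads, and negative numbers denote MUST_NOT_THROW regions.  */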
2837 bool
2838 stmt_can_throw_external (gimple stmt)
2840 int lp_nr;
2842 if (!stmt_could_throw_p (stmt))
2843 return false;
2845 lp_nr = lookup_stmt_eh_lp (stmt);
2846 return lp_nr == 0;
2849 /* Return true if STMT can throw an exception that is caught within
2850 the current function (CFUN). */
2852 bool
2853 stmt_can_throw_internal (gimple stmt)
2855 int lp_nr;
2857 if (!stmt_could_throw_p (stmt))
2858 return false;
2860 lp_nr = lookup_stmt_eh_lp (stmt);
2861 return lp_nr > 0;
2864 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2865 remove any entry it might have from the EH table. Return true if
2866 any change was made. */
2868 bool
2869 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2871 if (stmt_could_throw_p (stmt))
2872 return false;
2873 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2876 /* Likewise, but always use the current function. */
2878 bool
2879 maybe_clean_eh_stmt (gimple stmt)
2881 return maybe_clean_eh_stmt_fn (cfun, stmt);
2884 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2885 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2886 in the table if it should be in there. Return TRUE if a replacement was
2887 done that may require an EH edge purge. */
2889 bool
2890 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2892 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2894 if (lp_nr != 0)
2896 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2898 if (new_stmt == old_stmt && new_stmt_could_throw)
2899 return false;
2901 remove_stmt_from_eh_lp (old_stmt);
2902 if (new_stmt_could_throw)
2904 add_stmt_to_eh_lp (new_stmt, lp_nr);
2905 return false;
2907 else
2908 return true;
2911 return false;
2914 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2915 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2916 operand is the return value of duplicate_eh_regions. */
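/* MAP is keyed by the eh_region and eh_landing_pad pointers of OLD_FUN
   and yields the corresponding duplicates in NEW_FUN, as recorded by
   duplicate_eh_regions.  */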
2918 bool
2919 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2920 struct function *old_fun, gimple old_stmt,
2921 struct pointer_map_t *map, int default_lp_nr)
2923 int old_lp_nr, new_lp_nr;
2924 void **slot;
2926 if (!stmt_could_throw_p (new_stmt))
2927 return false;
2929 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2930 if (old_lp_nr == 0)
2932 if (default_lp_nr == 0)
2933 return false;
2934 new_lp_nr = default_lp_nr;
2936 else if (old_lp_nr > 0)
2938 eh_landing_pad old_lp, new_lp;
2940 old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
2941 slot = pointer_map_contains (map, old_lp);
2942 new_lp = (eh_landing_pad) *slot;
2943 new_lp_nr = new_lp->index;
2945 else
2947 eh_region old_r, new_r;
2949 old_r = (*old_fun->eh->region_array)[-old_lp_nr];
2950 slot = pointer_map_contains (map, old_r);
2951 new_r = (eh_region) *slot;
2952 new_lp_nr = -new_r->index;
2955 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2956 return true;
2959 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2960 and thus no remapping is required. */
2962 bool
2963 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2965 int lp_nr;
2967 if (!stmt_could_throw_p (new_stmt))
2968 return false;
2970 lp_nr = lookup_stmt_eh_lp (old_stmt);
2971 if (lp_nr == 0)
2972 return false;
2974 add_stmt_to_eh_lp (new_stmt, lp_nr);
2975 return true;
2978 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2979 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2980 this only handles handlers consisting of a single call, as that's the
2981 important case for C++: a destructor call for a particular object showing
2982 up in multiple handlers. */
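/* For example (a C++-flavoured sketch), two handlers that each consist
   of the single call
     Obj::~Obj (&o);
   for the same object O compare equal here, since the callee and all
   arguments match and neither call has an LHS or a static chain.  */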
2984 static bool
2985 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2987 gimple_stmt_iterator gsi;
2988 gimple ones, twos;
2989 unsigned int ai;
2991 gsi = gsi_start (oneh);
2992 if (!gsi_one_before_end_p (gsi))
2993 return false;
2994 ones = gsi_stmt (gsi);
2996 gsi = gsi_start (twoh);
2997 if (!gsi_one_before_end_p (gsi))
2998 return false;
2999 twos = gsi_stmt (gsi);
3001 if (!is_gimple_call (ones)
3002 || !is_gimple_call (twos)
3003 || gimple_call_lhs (ones)
3004 || gimple_call_lhs (twos)
3005 || gimple_call_chain (ones)
3006 || gimple_call_chain (twos)
3007 || !gimple_call_same_target_p (ones, twos)
3008 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
3009 return false;
3011 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
3012 if (!operand_equal_p (gimple_call_arg (ones, ai),
3013 gimple_call_arg (twos, ai), 0))
3014 return false;
3016 return true;
3019 /* Optimize
3020 try { A() } finally { try { ~B() } catch { ~A() } }
3021 try { ... } finally { ~A() }
3022 into
3023 try { A() } catch { ~B() }
3024 try { ~B() ... } finally { ~A() }
3026 This occurs frequently in C++, where A is a local variable and B is a
3027 temporary used in the initializer for A. */
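/* A hypothetical C++ fragment producing this shape:

     A a = A (B ());

   where B is a temporary destroyed once A's initialization completes;
   ~A must run both on the EH path of the initializer and on every
   later exit, yielding the nested try/finally pattern above.  */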
3029 static void
3030 optimize_double_finally (gimple one, gimple two)
3032 gimple oneh;
3033 gimple_stmt_iterator gsi;
3034 gimple_seq cleanup;
3036 cleanup = gimple_try_cleanup (one);
3037 gsi = gsi_start (cleanup);
3038 if (!gsi_one_before_end_p (gsi))
3039 return;
3041 oneh = gsi_stmt (gsi);
3042 if (gimple_code (oneh) != GIMPLE_TRY
3043 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
3044 return;
3046 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
3048 gimple_seq seq = gimple_try_eval (oneh);
3050 gimple_try_set_cleanup (one, seq);
3051 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
3052 seq = copy_gimple_seq_and_replace_locals (seq);
3053 gimple_seq_add_seq (&seq, gimple_try_eval (two));
3054 gimple_try_set_eval (two, seq);
3058 /* Perform EH refactoring optimizations that are simpler to do when code
3059 flow has been lowered but EH structures haven't. */
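/* The walk below keeps a sliding window of two consecutive statements
   ONE and TWO; adjacent GIMPLE_TRY_FINALLY pairs are handed to
   optimize_double_finally, and the walk recurses into the bodies of
   any try, catch, eh-filter, and eh-else constructs it visits.  */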
3061 static void
3062 refactor_eh_r (gimple_seq seq)
3064 gimple_stmt_iterator gsi;
3065 gimple one, two;
3067 one = NULL;
3068 two = NULL;
3069 gsi = gsi_start (seq);
3070 while (1)
3072 one = two;
3073 if (gsi_end_p (gsi))
3074 two = NULL;
3075 else
3076 two = gsi_stmt (gsi);
3077 if (one
3078 && two
3079 && gimple_code (one) == GIMPLE_TRY
3080 && gimple_code (two) == GIMPLE_TRY
3081 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
3082 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
3083 optimize_double_finally (one, two);
3084 if (one)
3085 switch (gimple_code (one))
3087 case GIMPLE_TRY:
3088 refactor_eh_r (gimple_try_eval (one));
3089 refactor_eh_r (gimple_try_cleanup (one));
3090 break;
3091 case GIMPLE_CATCH:
3092 refactor_eh_r (gimple_catch_handler (one));
3093 break;
3094 case GIMPLE_EH_FILTER:
3095 refactor_eh_r (gimple_eh_filter_failure (one));
3096 break;
3097 case GIMPLE_EH_ELSE:
3098 refactor_eh_r (gimple_eh_else_n_body (one));
3099 refactor_eh_r (gimple_eh_else_e_body (one));
3100 break;
3101 default:
3102 break;
3104 if (two)
3105 gsi_next (&gsi);
3106 else
3107 break;
3111 namespace {
3113 const pass_data pass_data_refactor_eh =
3115 GIMPLE_PASS, /* type */
3116 "ehopt", /* name */
3117 OPTGROUP_NONE, /* optinfo_flags */
3118 true, /* has_execute */
3119 TV_TREE_EH, /* tv_id */
3120 PROP_gimple_lcf, /* properties_required */
3121 0, /* properties_provided */
3122 0, /* properties_destroyed */
3123 0, /* todo_flags_start */
3124 0, /* todo_flags_finish */
3127 class pass_refactor_eh : public gimple_opt_pass
3129 public:
3130 pass_refactor_eh (gcc::context *ctxt)
3131 : gimple_opt_pass (pass_data_refactor_eh, ctxt)
3134 /* opt_pass methods: */
3135 virtual bool gate (function *) { return flag_exceptions != 0; }
3136 virtual unsigned int execute (function *)
3138 refactor_eh_r (gimple_body (current_function_decl));
3139 return 0;
3142 }; // class pass_refactor_eh
3144 } // anon namespace
3146 gimple_opt_pass *
3147 make_pass_refactor_eh (gcc::context *ctxt)
3149 return new pass_refactor_eh (ctxt);
3152 /* At the end of gimple optimization, we can lower RESX. */
3154 static bool
3155 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
3157 int lp_nr;
3158 eh_region src_r, dst_r;
3159 gimple_stmt_iterator gsi;
3160 gimple x;
3161 tree fn, src_nr;
3162 bool ret = false;
3164 lp_nr = lookup_stmt_eh_lp (stmt);
3165 if (lp_nr != 0)
3166 dst_r = get_eh_region_from_lp_number (lp_nr);
3167 else
3168 dst_r = NULL;
3170 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
3171 gsi = gsi_last_bb (bb);
3173 if (src_r == NULL)
3175 /* We can wind up with no source region when pass_cleanup_eh shows
3176 that there are no entries into an eh region and deletes it, but
3177 then the block that contains the resx isn't removed. This can
3178 happen without optimization when the switch statement created by
3179 lower_try_finally_switch isn't simplified to remove the eh case.
3181 Resolve this by expanding the resx node to an abort. */
3183 fn = builtin_decl_implicit (BUILT_IN_TRAP);
3184 x = gimple_build_call (fn, 0);
3185 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3187 while (EDGE_COUNT (bb->succs) > 0)
3188 remove_edge (EDGE_SUCC (bb, 0));
3190 else if (dst_r)
3192 /* When we have a destination region, we resolve this by copying
3193 the excptr and filter values into place, and changing the edge
3194 to immediately after the landing pad. */
3195 edge e;
3197 if (lp_nr < 0)
3199 basic_block new_bb;
3200 void **slot;
3201 tree lab;
3203 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
3204 the failure decl into a new block, if needed. */
3205 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
3207 slot = pointer_map_contains (mnt_map, dst_r);
3208 if (slot == NULL)
3210 gimple_stmt_iterator gsi2;
3212 new_bb = create_empty_bb (bb);
3213 if (current_loops)
3214 add_bb_to_loop (new_bb, bb->loop_father);
3215 lab = gimple_block_label (new_bb);
3216 gsi2 = gsi_start_bb (new_bb);
3218 fn = dst_r->u.must_not_throw.failure_decl;
3219 x = gimple_build_call (fn, 0);
3220 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3221 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3223 slot = pointer_map_insert (mnt_map, dst_r);
3224 *slot = lab;
3226 else
3228 lab = (tree) *slot;
3229 new_bb = label_to_block (lab);
3232 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3233 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
3234 e->count = bb->count;
3235 e->probability = REG_BR_PROB_BASE;
3237 else
3239 edge_iterator ei;
3240 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
3242 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
3243 src_nr = build_int_cst (integer_type_node, src_r->index);
3244 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3245 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3247 /* Update the flags for the outgoing edge. */
3248 e = single_succ_edge (bb);
3249 gcc_assert (e->flags & EDGE_EH);
3250 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3252 /* If there are no more EH users of the landing pad, delete it. */
3253 FOR_EACH_EDGE (e, ei, e->dest->preds)
3254 if (e->flags & EDGE_EH)
3255 break;
3256 if (e == NULL)
3258 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3259 remove_eh_landing_pad (lp);
3263 ret = true;
3265 else
3267 tree var;
3269 /* When we don't have a destination region, this exception escapes
3270 up the call chain. We resolve this by generating a call to the
3271 _Unwind_Resume library function. */
3273 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3274 with no arguments for C++ and Java. Check for that. */
3275 if (src_r->use_cxa_end_cleanup)
3277 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3278 x = gimple_build_call (fn, 0);
3279 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3281 else
3283 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3284 src_nr = build_int_cst (integer_type_node, src_r->index);
3285 x = gimple_build_call (fn, 1, src_nr);
3286 var = create_tmp_var (ptr_type_node, NULL);
3287 var = make_ssa_name (var, x);
3288 gimple_call_set_lhs (x, var);
3289 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3291 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3292 x = gimple_build_call (fn, 1, var);
3293 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3296 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3299 gsi_remove (&gsi, true);
3301 return ret;
3304 namespace {
3306 const pass_data pass_data_lower_resx =
3308 GIMPLE_PASS, /* type */
3309 "resx", /* name */
3310 OPTGROUP_NONE, /* optinfo_flags */
3311 true, /* has_execute */
3312 TV_TREE_EH, /* tv_id */
3313 PROP_gimple_lcf, /* properties_required */
3314 0, /* properties_provided */
3315 0, /* properties_destroyed */
3316 0, /* todo_flags_start */
3317 TODO_verify_flow, /* todo_flags_finish */
3320 class pass_lower_resx : public gimple_opt_pass
3322 public:
3323 pass_lower_resx (gcc::context *ctxt)
3324 : gimple_opt_pass (pass_data_lower_resx, ctxt)
3327 /* opt_pass methods: */
3328 virtual bool gate (function *) { return flag_exceptions != 0; }
3329 virtual unsigned int execute (function *);
3331 }; // class pass_lower_resx
3333 unsigned
3334 pass_lower_resx::execute (function *fun)
3336 basic_block bb;
3337 struct pointer_map_t *mnt_map;
3338 bool dominance_invalidated = false;
3339 bool any_rewritten = false;
3341 mnt_map = pointer_map_create ();
3343 FOR_EACH_BB_FN (bb, fun)
3345 gimple last = last_stmt (bb);
3346 if (last && is_gimple_resx (last))
3348 dominance_invalidated |= lower_resx (bb, last, mnt_map);
3349 any_rewritten = true;
3353 pointer_map_destroy (mnt_map);
3355 if (dominance_invalidated)
3357 free_dominance_info (CDI_DOMINATORS);
3358 free_dominance_info (CDI_POST_DOMINATORS);
3361 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3364 } // anon namespace
3366 gimple_opt_pass *
3367 make_pass_lower_resx (gcc::context *ctxt)
3369 return new pass_lower_resx (ctxt);
3372 /* Try to optimize var = {v} {CLOBBER} stmts followed just by
3373 external throw. */
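/* A clobber is an assignment of the form var = {v} {CLOBBER}, marking
   the end of VAR's lifetime.  If the block merely rethrows out of the
   function, the clobbers have no consumer left in this function and
   can simply be deleted.  */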
3375 static void
3376 optimize_clobbers (basic_block bb)
3378 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3379 bool any_clobbers = false;
3380 bool seen_stack_restore = false;
3381 edge_iterator ei;
3382 edge e;
3384 /* Only optimize anything if the bb contains at least one clobber,
3385 ends with resx (checked by caller), optionally contains some
3386 debug stmts or labels and at most one __builtin_stack_restore
3387 call, and has an incoming EH edge. */
3388 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3390 gimple stmt = gsi_stmt (gsi);
3391 if (is_gimple_debug (stmt))
3392 continue;
3393 if (gimple_clobber_p (stmt))
3395 any_clobbers = true;
3396 continue;
3398 if (!seen_stack_restore
3399 && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
3401 seen_stack_restore = true;
3402 continue;
3404 if (gimple_code (stmt) == GIMPLE_LABEL)
3405 break;
3406 return;
3408 if (!any_clobbers)
3409 return;
3410 FOR_EACH_EDGE (e, ei, bb->preds)
3411 if (e->flags & EDGE_EH)
3412 break;
3413 if (e == NULL)
3414 return;
3415 gsi = gsi_last_bb (bb);
3416 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3418 gimple stmt = gsi_stmt (gsi);
3419 if (!gimple_clobber_p (stmt))
3420 continue;
3421 unlink_stmt_vdef (stmt);
3422 gsi_remove (&gsi, true);
3423 release_defs (stmt);
3427 /* Try to sink var = {v} {CLOBBER} stmts followed just by
3428 internal throw to successor BB. */
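/* This moves the clobbers from BB to the head of its single EH
   successor, leaving BB with only labels, debug stmts and the final
   RESX; virtual operands are either adjusted in place or queued for
   renaming, as handled below.  */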
3430 static int
3431 sink_clobbers (basic_block bb)
3433 edge e;
3434 edge_iterator ei;
3435 gimple_stmt_iterator gsi, dgsi;
3436 basic_block succbb;
3437 bool any_clobbers = false;
3438 unsigned todo = 0;
3440 /* Only optimize if BB has a single EH successor and
3441 all predecessor edges are EH too. */
3442 if (!single_succ_p (bb)
3443 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3444 return 0;
3446 FOR_EACH_EDGE (e, ei, bb->preds)
3448 if ((e->flags & EDGE_EH) == 0)
3449 return 0;
3452 /* And BB contains only CLOBBER stmts before the final
3453 RESX. */
3454 gsi = gsi_last_bb (bb);
3455 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3457 gimple stmt = gsi_stmt (gsi);
3458 if (is_gimple_debug (stmt))
3459 continue;
3460 if (gimple_code (stmt) == GIMPLE_LABEL)
3461 break;
3462 if (!gimple_clobber_p (stmt))
3463 return 0;
3464 any_clobbers = true;
3466 if (!any_clobbers)
3467 return 0;
3469 edge succe = single_succ_edge (bb);
3470 succbb = succe->dest;
3472 /* See if there is a virtual PHI node to take an updated virtual
3473 operand from. */
3474 gimple vphi = NULL;
3475 tree vuse = NULL_TREE;
3476 for (gsi = gsi_start_phis (succbb); !gsi_end_p (gsi); gsi_next (&gsi))
3478 tree res = gimple_phi_result (gsi_stmt (gsi));
3479 if (virtual_operand_p (res))
3481 vphi = gsi_stmt (gsi);
3482 vuse = res;
3483 break;
3487 dgsi = gsi_after_labels (succbb);
3488 gsi = gsi_last_bb (bb);
3489 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3491 gimple stmt = gsi_stmt (gsi);
3492 tree lhs;
3493 if (is_gimple_debug (stmt))
3494 continue;
3495 if (gimple_code (stmt) == GIMPLE_LABEL)
3496 break;
3497 lhs = gimple_assign_lhs (stmt);
3498 /* Unfortunately we don't have dominance info updated at this
3499 point, so checking if
3500 dominated_by_p (CDI_DOMINATORS, succbb,
3501 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0))))
3502 would be too costly. Thus, avoid sinking any clobbers that
3503 refer to non-(D) SSA_NAMEs. */
3504 if (TREE_CODE (lhs) == MEM_REF
3505 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
3506 && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0)))
3508 unlink_stmt_vdef (stmt);
3509 gsi_remove (&gsi, true);
3510 release_defs (stmt);
3511 continue;
3514 /* As we do not change stmt order when sinking across a
3515 forwarder edge we can keep virtual operands in place. */
3516 gsi_remove (&gsi, false);
3517 gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);
3519 /* But adjust virtual operands if we sunk across a PHI node. */
3520 if (vuse)
3522 gimple use_stmt;
3523 imm_use_iterator iter;
3524 use_operand_p use_p;
3525 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
3526 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3527 SET_USE (use_p, gimple_vdef (stmt));
3528 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
3530 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
3531 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
3533 /* Adjust the incoming virtual operand. */
3534 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
3535 SET_USE (gimple_vuse_op (stmt), vuse);
3537 /* If the successor has multiple predecessors but no virtual PHI node,
3538 arrange for the virtual operands to be renamed. */
3539 else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
3540 && !single_pred_p (succbb))
3542 /* In this case there will be no use of the VDEF of this stmt.
3543 ??? Unless this is a secondary opportunity and we have not
3544 removed unreachable blocks yet, so we cannot assert this.
3545 Which also means we will end up renaming too many times. */
3546 SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
3547 mark_virtual_operands_for_renaming (cfun);
3548 todo |= TODO_update_ssa_only_virtuals;
3552 return todo;
3555 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3556 we have found some duplicate labels and removed some edges. */
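/* In the general case an ERT_TRY dispatch becomes a GIMPLE_SWITCH on
   the result of __builtin_eh_filter (REGION_NR), with one case label
   per distinct filter value (unless only the default case remains, in
   which case the edge is simply made fallthru); an
   ERT_ALLOWED_EXCEPTIONS dispatch becomes a GIMPLE_COND comparing that
   filter value against the region's allowed filter.  */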
3558 static bool
3559 lower_eh_dispatch (basic_block src, gimple stmt)
3561 gimple_stmt_iterator gsi;
3562 int region_nr;
3563 eh_region r;
3564 tree filter, fn;
3565 gimple x;
3566 bool redirected = false;
3568 region_nr = gimple_eh_dispatch_region (stmt);
3569 r = get_eh_region_from_number (region_nr);
3571 gsi = gsi_last_bb (src);
3573 switch (r->type)
3575 case ERT_TRY:
3577 auto_vec<tree> labels;
3578 tree default_label = NULL;
3579 eh_catch c;
3580 edge_iterator ei;
3581 edge e;
3582 struct pointer_set_t *seen_values = pointer_set_create ();
3584 /* Collect the labels for a switch. Zero the post_landing_pad
3585 field because we'll no longer have anything keeping these labels
3586 in existence and the optimizer will be free to merge these
3587 blocks at will. */
3588 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3590 tree tp_node, flt_node, lab = c->label;
3591 bool have_label = false;
3593 c->label = NULL;
3594 tp_node = c->type_list;
3595 flt_node = c->filter_list;
3597 if (tp_node == NULL)
3599 default_label = lab;
3600 break;
3604 /* Filter out duplicate labels that arise when this handler
3605 is shadowed by an earlier one. When no labels are
3606 attached to the handler anymore, we remove
3607 the corresponding edge and then we delete unreachable
3608 blocks at the end of this pass. */
3609 if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
3611 tree t = build_case_label (TREE_VALUE (flt_node),
3612 NULL, lab);
3613 labels.safe_push (t);
3614 pointer_set_insert (seen_values, TREE_VALUE (flt_node));
3615 have_label = true;
3618 tp_node = TREE_CHAIN (tp_node);
3619 flt_node = TREE_CHAIN (flt_node);
3621 while (tp_node);
3622 if (! have_label)
3624 remove_edge (find_edge (src, label_to_block (lab)));
3625 redirected = true;
3629 /* Clean up the edge flags. */
3630 FOR_EACH_EDGE (e, ei, src->succs)
3632 if (e->flags & EDGE_FALLTHRU)
3634 /* If there was no catch-all, use the fallthru edge. */
3635 if (default_label == NULL)
3636 default_label = gimple_block_label (e->dest);
3637 e->flags &= ~EDGE_FALLTHRU;
3640 gcc_assert (default_label != NULL);
3642 /* Don't generate a switch if there's only a default case.
3643 This is common in the form of try { A; } catch (...) { B; }. */
3644 if (!labels.exists ())
3646 e = single_succ_edge (src);
3647 e->flags |= EDGE_FALLTHRU;
3649 else
3651 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3652 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3653 region_nr));
3654 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3655 filter = make_ssa_name (filter, x);
3656 gimple_call_set_lhs (x, filter);
3657 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3659 /* Turn the default label into a default case. */
3660 default_label = build_case_label (NULL, NULL, default_label);
3661 sort_case_labels (labels);
3663 x = gimple_build_switch (filter, default_label, labels);
3664 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3666 pointer_set_destroy (seen_values);
3668 break;
3670 case ERT_ALLOWED_EXCEPTIONS:
3672 edge b_e = BRANCH_EDGE (src);
3673 edge f_e = FALLTHRU_EDGE (src);
3675 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3676 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3677 region_nr));
3678 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3679 filter = make_ssa_name (filter, x);
3680 gimple_call_set_lhs (x, filter);
3681 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3683 r->u.allowed.label = NULL;
3684 x = gimple_build_cond (EQ_EXPR, filter,
3685 build_int_cst (TREE_TYPE (filter),
3686 r->u.allowed.filter),
3687 NULL_TREE, NULL_TREE);
3688 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3690 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3691 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3693 break;
3695 default:
3696 gcc_unreachable ();
3699 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3700 gsi_remove (&gsi, true);
3701 return redirected;
3704 namespace {
3706 const pass_data pass_data_lower_eh_dispatch =
3708 GIMPLE_PASS, /* type */
3709 "ehdisp", /* name */
3710 OPTGROUP_NONE, /* optinfo_flags */
3711 true, /* has_execute */
3712 TV_TREE_EH, /* tv_id */
3713 PROP_gimple_lcf, /* properties_required */
3714 0, /* properties_provided */
3715 0, /* properties_destroyed */
3716 0, /* todo_flags_start */
3717 TODO_verify_flow, /* todo_flags_finish */
3720 class pass_lower_eh_dispatch : public gimple_opt_pass
3722 public:
3723 pass_lower_eh_dispatch (gcc::context *ctxt)
3724 : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt)
3727 /* opt_pass methods: */
3728 virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; }
3729 virtual unsigned int execute (function *);
3731 }; // class pass_lower_eh_dispatch
3733 unsigned
3734 pass_lower_eh_dispatch::execute (function *fun)
3736 basic_block bb;
3737 int flags = 0;
3738 bool redirected = false;
3740 assign_filter_values ();
3742 FOR_EACH_BB_FN (bb, fun)
3744 gimple last = last_stmt (bb);
3745 if (last == NULL)
3746 continue;
3747 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
3749 redirected |= lower_eh_dispatch (bb, last);
3750 flags |= TODO_update_ssa_only_virtuals;
3752 else if (gimple_code (last) == GIMPLE_RESX)
3754 if (stmt_can_throw_external (last))
3755 optimize_clobbers (bb);
3756 else
3757 flags |= sink_clobbers (bb);
3761 if (redirected)
3762 delete_unreachable_blocks ();
3763 return flags;
3766 } // anon namespace
3768 gimple_opt_pass *
3769 make_pass_lower_eh_dispatch (gcc::context *ctxt)
3771 return new pass_lower_eh_dispatch (ctxt);
3774 /* Walk statements, see what regions and, optionally, landing pads
3775 are really referenced.
3777 Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
3778 and in LP_REACHABLEP an sbitmap with bits set for reachable landing pads.
3780 Passing NULL for LP_REACHABLE is valid, in this case only reachable
3781 regions are marked.
3783 The caller is responsible for freeing the returned sbitmaps. */
3785 static void
3786 mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
3788 sbitmap r_reachable, lp_reachable;
3789 basic_block bb;
3790 bool mark_landing_pads = (lp_reachablep != NULL);
3791 gcc_checking_assert (r_reachablep != NULL);
3793 r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
3794 bitmap_clear (r_reachable);
3795 *r_reachablep = r_reachable;
3797 if (mark_landing_pads)
3799 lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
3800 bitmap_clear (lp_reachable);
3801 *lp_reachablep = lp_reachable;
3803 else
3804 lp_reachable = NULL;
3806 FOR_EACH_BB_FN (bb, cfun)
3808 gimple_stmt_iterator gsi;
3810 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3812 gimple stmt = gsi_stmt (gsi);
3814 if (mark_landing_pads)
3816 int lp_nr = lookup_stmt_eh_lp (stmt);
3818 /* Negative LP numbers are MUST_NOT_THROW regions which
3819 are not considered BB enders. */
3820 if (lp_nr < 0)
3821 bitmap_set_bit (r_reachable, -lp_nr);
3823 /* Positive LP numbers are real landing pads, and BB enders. */
3824 else if (lp_nr > 0)
3826 gcc_assert (gsi_one_before_end_p (gsi));
3827 eh_region region = get_eh_region_from_lp_number (lp_nr);
3828 bitmap_set_bit (r_reachable, region->index);
3829 bitmap_set_bit (lp_reachable, lp_nr);
3833 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3834 switch (gimple_code (stmt))
3836 case GIMPLE_RESX:
3837 bitmap_set_bit (r_reachable, gimple_resx_region (stmt));
3838 break;
3839 case GIMPLE_EH_DISPATCH:
3840 bitmap_set_bit (r_reachable, gimple_eh_dispatch_region (stmt));
3841 break;
3842 default:
3843 break;
3849 /* Remove unreachable handlers and unreachable landing pads. */
3851 static void
3852 remove_unreachable_handlers (void)
3854 sbitmap r_reachable, lp_reachable;
3855 eh_region region;
3856 eh_landing_pad lp;
3857 unsigned i;
3859 mark_reachable_handlers (&r_reachable, &lp_reachable);
3861 if (dump_file)
3863 fprintf (dump_file, "Before removal of unreachable regions:\n");
3864 dump_eh_tree (dump_file, cfun);
3865 fprintf (dump_file, "Reachable regions: ");
3866 dump_bitmap_file (dump_file, r_reachable);
3867 fprintf (dump_file, "Reachable landing pads: ");
3868 dump_bitmap_file (dump_file, lp_reachable);
3871 if (dump_file)
3873 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3874 if (region && !bitmap_bit_p (r_reachable, region->index))
3875 fprintf (dump_file,
3876 "Removing unreachable region %d\n",
3877 region->index);
3880 remove_unreachable_eh_regions (r_reachable);
3882 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3883 if (lp && !bitmap_bit_p (lp_reachable, lp->index))
3885 if (dump_file)
3886 fprintf (dump_file,
3887 "Removing unreachable landing pad %d\n",
3888 lp->index);
3889 remove_eh_landing_pad (lp);
3892 if (dump_file)
3894 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3895 dump_eh_tree (dump_file, cfun);
3896 fprintf (dump_file, "\n\n");
3899 sbitmap_free (r_reachable);
3900 sbitmap_free (lp_reachable);
3902 #ifdef ENABLE_CHECKING
3903 verify_eh_tree (cfun);
3904 #endif
3907 /* Remove unreachable handlers if any landing pads have been removed after
3908 the last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
3910 void
3911 maybe_remove_unreachable_handlers (void)
3913 eh_landing_pad lp;
3914 unsigned i;
3916 if (cfun->eh == NULL)
3917 return;
3919 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3920 if (lp && lp->post_landing_pad)
3922 if (label_to_block (lp->post_landing_pad) == NULL)
3924 remove_unreachable_handlers ();
3925 return;
3930 /* Remove regions that do not have landing pads. This assumes
3931 that remove_unreachable_handlers has already been run, and
3932 that we've just manipulated the landing pads since then.
3934 Preserve regions with landing pads and regions that prevent
3935 exceptions from propagating further, even if these regions
3936 are not reachable. */
3938 static void
3939 remove_unreachable_handlers_no_lp (void)
3941 eh_region region;
3942 sbitmap r_reachable;
3943 unsigned i;
3945 mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);
3947 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3949 if (! region)
3950 continue;
3952 if (region->landing_pads != NULL
3953 || region->type == ERT_MUST_NOT_THROW)
3954 bitmap_set_bit (r_reachable, region->index);
3956 if (dump_file
3957 && !bitmap_bit_p (r_reachable, region->index))
3958 fprintf (dump_file,
3959 "Removing unreachable region %d\n",
3960 region->index);
3963 remove_unreachable_eh_regions (r_reachable);
3965 sbitmap_free (r_reachable);
3968 /* Undo critical edge splitting on an EH landing pad. Earlier, we
3969 optimistically split all sorts of edges, including EH edges. The
3970 optimization passes in between may not have needed them; if not,
3971 we should undo the split.
3973 Recognize this case by having one EH edge incoming to the BB and
3974 one normal edge outgoing; BB should be empty apart from the
3975 post_landing_pad label.
3977 Note that this is slightly different from the empty handler case
3978 handled by cleanup_empty_eh, in that the actual handler may yet
3979 have actual code but the landing pad has been separated from the
3980 handler. As such, cleanup_empty_eh relies on this transformation
3981 having been done first. */
3983 static bool
3984 unsplit_eh (eh_landing_pad lp)
3986 basic_block bb = label_to_block (lp->post_landing_pad);
3987 gimple_stmt_iterator gsi;
3988 edge e_in, e_out;
3990 /* Quickly check the edge counts on BB for singularity. */
3991 if (!single_pred_p (bb) || !single_succ_p (bb))
3992 return false;
3993 e_in = single_pred_edge (bb);
3994 e_out = single_succ_edge (bb);
3996 /* Input edge must be EH and output edge must be normal. */
3997 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3998 return false;
4000 /* The block must be empty except for the labels and debug insns. */
4001 gsi = gsi_after_labels (bb);
4002 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4003 gsi_next_nondebug (&gsi);
4004 if (!gsi_end_p (gsi))
4005 return false;
4007 /* The destination block must not already have a landing pad
4008 for a different region. */
4009 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4011 gimple stmt = gsi_stmt (gsi);
4012 tree lab;
4013 int lp_nr;
4015 if (gimple_code (stmt) != GIMPLE_LABEL)
4016 break;
4017 lab = gimple_label_label (stmt);
4018 lp_nr = EH_LANDING_PAD_NR (lab);
4019 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4020 return false;
4023 /* The new destination block must not already be a destination of
4024 the source block, lest we merge fallthru and eh edges and get
4025 all sorts of confusion.
4026 if (find_edge (e_in->src, e_out->dest))
4027 return false;
4029 /* ??? We can get degenerate phis due to cfg cleanups. I would have
4030 thought this should have been cleaned up by a phicprop pass, but
4031 that doesn't appear to handle virtuals. Propagate by hand. */
4032 if (!gimple_seq_empty_p (phi_nodes (bb)))
4034 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
4036 gimple use_stmt, phi = gsi_stmt (gsi);
4037 tree lhs = gimple_phi_result (phi);
4038 tree rhs = gimple_phi_arg_def (phi, 0);
4039 use_operand_p use_p;
4040 imm_use_iterator iter;
4042 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
4044 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
4045 SET_USE (use_p, rhs);
4048 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4049 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
4051 remove_phi_node (&gsi, true);
4055 if (dump_file && (dump_flags & TDF_DETAILS))
4056 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
4057 lp->index, e_out->dest->index);
4059 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
4060 a successor edge, humor it. But do the real CFG change with the
4061 predecessor of E_OUT in order to preserve the ordering of arguments
4062 to the PHI nodes in E_OUT->DEST. */
4063 redirect_eh_edge_1 (e_in, e_out->dest, false);
4064 redirect_edge_pred (e_out, e_in->src);
4065 e_out->flags = e_in->flags;
4066 e_out->probability = e_in->probability;
4067 e_out->count = e_in->count;
4068 remove_edge (e_in);
4070 return true;
/* Examine each landing pad block and see if it matches unsplit_eh.  */

static bool
unsplit_all_eh (void)
{
  bool changed = false;
  eh_landing_pad lp;
  int i;
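  /* Landing pad zero is reserved (add_stmt_to_eh_lp_fn asserts NUM != 0),
     so the walk over LP_ARRAY starts at index 1.  */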
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp)
      changed |= unsplit_eh (lp);

  return changed;
}
/* A subroutine of cleanup_empty_eh.  Redirect all EH edges incoming
   to OLD_BB to NEW_BB; return true on success, false on failure.

   OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
   PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
   Virtual PHIs may be deleted and marked for renaming.  */
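/* For illustration only, with made-up SSA names: if OLD_BB has

     x_1 = PHI <a_2 (E1), b_3 (E2)>

   and NEW_BB has

     y_4 = PHI <x_1 (OLD_BB_OUT), c_5 (E3)>

   then after a successful merge NEW_BB reads

     y_4 = PHI <a_2 (E1), b_3 (E2), c_5 (E3)>

   with E1 and E2 now pointing at NEW_BB.  */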
static bool
cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
                             edge old_bb_out, bool change_region)
{
  gimple_stmt_iterator ngsi, ogsi;
  edge_iterator ei;
  edge e;
  bitmap ophi_handled;

  /* The destination block must not be a regular successor for any
     of the preds of the landing pad.  Thus, avoid turning
        <..>
        |  \  EH
        |  <..>
        |  /
        <..>
     into
        <..>
        |  | EH
        <..>
     which CFG verification would choke on.  See PR45172 and PR51089.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (find_edge (e->src, new_bb))
      return false;

  FOR_EACH_EDGE (e, ei, old_bb->preds)
    redirect_edge_var_map_clear (e);

  ophi_handled = BITMAP_ALLOC (NULL);

  /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
     for the edges we're going to move.  */
  for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
    {
      gimple ophi, nphi = gsi_stmt (ngsi);
      tree nresult, nop;

      nresult = gimple_phi_result (nphi);
      nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);

      /* Find the corresponding PHI in OLD_BB so we can forward-propagate
         the source ssa_name.  */
      ophi = NULL;
      for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
        {
          ophi = gsi_stmt (ogsi);
          if (gimple_phi_result (ophi) == nop)
            break;
          ophi = NULL;
        }

      /* If we did find the corresponding PHI, copy those inputs.  */
      if (ophi)
        {
          /* If NOP is used somewhere else beyond phis in new_bb, give up.  */
          if (!has_single_use (nop))
            {
              imm_use_iterator imm_iter;
              use_operand_p use_p;

              FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
                {
                  if (!gimple_debug_bind_p (USE_STMT (use_p))
                      && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
                          || gimple_bb (USE_STMT (use_p)) != new_bb))
                    goto fail;
                }
            }
          bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
          FOR_EACH_EDGE (e, ei, old_bb->preds)
            {
              location_t oloc;
              tree oop;

              if ((e->flags & EDGE_EH) == 0)
                continue;
              oop = gimple_phi_arg_def (ophi, e->dest_idx);
              oloc = gimple_phi_arg_location (ophi, e->dest_idx);
              redirect_edge_var_map_add (e, nresult, oop, oloc);
            }
        }
      /* If we didn't find the PHI, if it's a real variable or a VOP, we know
         from the fact that OLD_BB is tree_empty_eh_handler_p that the
         variable is unchanged from input to the block and we can simply
         re-use the input to NEW_BB from the OLD_BB_OUT edge.  */
      else
        {
          location_t nloc
            = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
          FOR_EACH_EDGE (e, ei, old_bb->preds)
            redirect_edge_var_map_add (e, nresult, nop, nloc);
        }
    }

  /* Second, verify that all PHIs from OLD_BB have been handled.  If not,
     we don't know what values from the other edges into NEW_BB to use.  */
  for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
    {
      gimple ophi = gsi_stmt (ogsi);
      tree oresult = gimple_phi_result (ophi);
      if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
        goto fail;
    }

  /* Finally, move the edges and update the PHIs.  */
  for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_EH)
      {
        /* ??? CFG manipulation routines do not try to update loop
           form on edge redirection.  Do so manually here for now.  */
        /* If we redirect a loop entry or latch edge, that will either
           create a multiple-entry loop or rotate the loop.  If the loops
           merge, we may end up with a loop that has multiple latches.
           None of this is easily fixed, so cancel the affected loop
           and mark the other loop as possibly having multiple latches.  */
        if (current_loops
            && e->dest == e->dest->loop_father->header)
          {
            e->dest->loop_father->header = NULL;
            e->dest->loop_father->latch = NULL;
            new_bb->loop_father->latch = NULL;
            loops_state_set (LOOPS_NEED_FIXUP|LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
          }
        redirect_eh_edge_1 (e, new_bb, change_region);
        redirect_edge_succ (e, new_bb);
        flush_pending_stmts (e);
      }
    else
      ei_next (&ei);

  BITMAP_FREE (ophi_handled);
  return true;

 fail:
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    redirect_edge_var_map_clear (e);
  BITMAP_FREE (ophi_handled);
  return false;
}
/* A subroutine of cleanup_empty_eh.  Move a landing pad LP from its
   old region to NEW_REGION at BB.  */

static void
cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
                          eh_landing_pad lp, eh_region new_region)
{
  gimple_stmt_iterator gsi;
  eh_landing_pad *pp;
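  /* Unlink LP from its old region's singly-linked list of landing pads
     by walking a pointer-to-pointer until *PP is LP itself.  */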
  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  lp->region = new_region;
  lp->next_lp = new_region->landing_pads;
  new_region->landing_pads = lp;

  /* Delete the RESX that was matched within the empty handler block.  */
  gsi = gsi_last_bb (bb);
  unlink_stmt_vdef (gsi_stmt (gsi));
  gsi_remove (&gsi, true);

  /* Clean up E_OUT for the fallthru.  */
  e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
  e_out->probability = REG_BR_PROB_BASE;
}
/* A subroutine of cleanup_empty_eh.  Handle more complex cases of
   unsplitting than unsplit_eh was prepared to handle, e.g. when
   multiple incoming edges and phis are involved.  */

static bool
cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
{
  gimple_stmt_iterator gsi;
  tree lab;

  /* We really ought not have totally lost everything following
     a landing pad label.  Given that BB is empty, there had better
     be a successor.  */
  gcc_assert (e_out != NULL);

  /* The destination block must not already have a landing pad
     for a different region.  */
  lab = NULL;
  for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      int lp_nr;

      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);
      lp_nr = EH_LANDING_PAD_NR (lab);
      if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
        return false;
    }

  /* Attempt to move the PHIs into the successor block.  */
  if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "Unsplit EH landing pad %d to block %i "
                 "(via cleanup_empty_eh).\n",
                 lp->index, e_out->dest->index);
      return true;
    }

  return false;
}
/* Return true if edge E_FIRST is part of an empty infinite loop
   or leads to such a loop through a series of single successor
   empty bbs.  */
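/* For illustration only: source like

     void f (void) { for (;;) ; }

   can leave an empty basic block whose single successor edge loops
   back to itself (or through a chain of empty blocks); that is the
   shape detected here.  */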
static bool
infinite_empty_loop_p (edge e_first)
{
  bool inf_loop = false;
  edge e;
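  /* BB->aux is borrowed as a visited marker while following the chain
     of single-successor blocks; all marks are cleared again before
     returning.  */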
  if (e_first->dest == e_first->src)
    return true;

  e_first->src->aux = (void *) 1;
  for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
    {
      gimple_stmt_iterator gsi;
      if (e->dest->aux)
        {
          inf_loop = true;
          break;
        }
      e->dest->aux = (void *) 1;
      gsi = gsi_after_labels (e->dest);
      if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
        gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
        break;
    }
  e_first->src->aux = NULL;
  for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
    e->dest->aux = NULL;

  return inf_loop;
}
/* Examine the block associated with LP to determine if it's an empty
   handler for its EH region.  If so, attempt to redirect EH edges to
   an outer region.  Return true if the CFG was updated in any way.
   This is similar to jump forwarding, just across EH edges.  */
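/* For illustration only (may_throw is a made-up function): a cleanup
   that ends up doing nothing, e.g.

     struct S { ~S () { } };
     void f (void) { S s; may_throw (); }

   leaves a landing pad whose handler is just a RESX to the outer
   region; the code below forwards or removes such pads.  */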
static bool
cleanup_empty_eh (eh_landing_pad lp)
{
  basic_block bb = label_to_block (lp->post_landing_pad);
  gimple_stmt_iterator gsi;
  gimple resx;
  eh_region new_region;
  edge_iterator ei;
  edge e, e_out;
  bool has_non_eh_pred;
  bool ret = false;
  int new_lp_nr;

  /* There can be zero or one edges out of BB.  This is the quickest test.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      e_out = NULL;
      break;
    case 1:
      e_out = single_succ_edge (bb);
      break;
    default:
      return false;
    }

  resx = last_stmt (bb);
  if (resx && is_gimple_resx (resx))
    {
      if (stmt_can_throw_external (resx))
        optimize_clobbers (bb);
      else if (sink_clobbers (bb))
        ret = true;
    }

  gsi = gsi_after_labels (bb);

  /* Make sure to skip debug statements.  */
  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);

  /* If the block is totally empty, look for more unsplitting cases.  */
  if (gsi_end_p (gsi))
    {
      /* Bail out for the degenerate case of an infinite loop.  Also bail
         out if BB has no successors and is totally empty, which can happen
         e.g. because of an incorrect noreturn attribute.  */
      if (e_out == NULL
          || infinite_empty_loop_p (e_out))
        return ret;

      return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
    }

  /* The block should consist only of a single RESX statement, modulo a
     preceding call to __builtin_stack_restore if there is no outgoing
     edge, since the call can be eliminated in this case.  */
  resx = gsi_stmt (gsi);
  if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
    {
      gsi_next (&gsi);
      resx = gsi_stmt (gsi);
    }
  if (!is_gimple_resx (resx))
    return ret;
  gcc_assert (gsi_one_before_end_p (gsi));

  /* Determine if there are non-EH edges, or resx edges into the handler.  */
  has_non_eh_pred = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!(e->flags & EDGE_EH))
      has_non_eh_pred = true;

  /* Find the handler that's outer of the empty handler by looking at
     where the RESX instruction was vectored.  */
  new_lp_nr = lookup_stmt_eh_lp (resx);
  new_region = get_eh_region_from_lp_number (new_lp_nr);

  /* If there's no destination region within the current function,
     redirection is trivial via removing the throwing statements from
     the EH region, removing the EH edges, and allowing the block
     to go unreachable.  */
  if (new_region == NULL)
    {
      gcc_assert (e_out == NULL);
      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
        if (e->flags & EDGE_EH)
          {
            gimple stmt = last_stmt (e->src);
            remove_stmt_from_eh_lp (stmt);
            remove_edge (e);
          }
        else
          ei_next (&ei);
      goto succeed;
    }

  /* If the destination region is a MUST_NOT_THROW, allow the runtime
     to handle the abort and allow the blocks to go unreachable.  */
  if (new_region->type == ERT_MUST_NOT_THROW)
    {
      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
        if (e->flags & EDGE_EH)
          {
            gimple stmt = last_stmt (e->src);
            remove_stmt_from_eh_lp (stmt);
            add_stmt_to_eh_lp (stmt, new_lp_nr);
            remove_edge (e);
          }
        else
          ei_next (&ei);
      goto succeed;
    }

  /* Try to redirect the EH edges and merge the PHIs into the destination
     landing pad block.  If the merge succeeds, we'll already have redirected
     all the EH edges.  The handler itself will go unreachable if there were
     no normal edges.  */
  if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
    goto succeed;

  /* Finally, if all input edges are EH edges, then we can (potentially)
     reduce the number of transfers from the runtime by moving the landing
     pad from the original region to the new region.  This is a win when
     we remove the last CLEANUP region along a particular exception
     propagation path.  Since nothing changes except for the region with
     which the landing pad is associated, the PHI nodes do not need to be
     adjusted at all.  */
  if (!has_non_eh_pred)
    {
      cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
                 lp->index, new_region->index);

      /* ??? The CFG didn't change, but we may have rendered the
         old EH region unreachable.  Trigger a cleanup there.  */
      return true;
    }

  return ret;

 succeed:
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
  remove_eh_landing_pad (lp);
  return true;
}
/* Do a post-order traversal of the EH region tree.  Examine each
   post_landing_pad block and see if we can eliminate it as empty.  */

static bool
cleanup_all_empty_eh (void)
{
  bool changed = false;
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp)
      changed |= cleanup_empty_eh (lp);

  return changed;
}
/* Perform cleanups and lowering of exception handling
     1) cleanup regions with handlers doing nothing are optimized out
     2) MUST_NOT_THROW regions that became dead because of 1) are
        optimized out
     3) info about regions containing instructions, and regions
        reachable via local EH edges, is collected
     4) the EH tree is pruned of regions that are no longer necessary.

   TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
         Unify those that have the same failure decl and locus.  */
static unsigned int
execute_cleanup_eh_1 (void)
{
  /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
     looking up unreachable landing pads.  */
  remove_unreachable_handlers ();

  /* Watch out for the region tree vanishing due to all unreachable.  */
  if (cfun->eh->region_tree)
    {
      bool changed = false;

      if (optimize)
        changed |= unsplit_all_eh ();
      changed |= cleanup_all_empty_eh ();

      if (changed)
        {
          free_dominance_info (CDI_DOMINATORS);
          free_dominance_info (CDI_POST_DOMINATORS);

          /* We delayed all basic block deletion, as we may have performed
             cleanups on EH edges while non-EH edges were still present.  */
          delete_unreachable_blocks ();

          /* We manipulated the landing pads.  Remove any region that no
             longer has a landing pad.  */
          remove_unreachable_handlers_no_lp ();

          return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
        }
    }

  return 0;
}
namespace {

const pass_data pass_data_cleanup_eh =
{
  GIMPLE_PASS, /* type */
  "ehcleanup", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};
class pass_cleanup_eh : public gimple_opt_pass
{
public:
  pass_cleanup_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }
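  /* clone lets the pass manager create additional instances of this
     pass when it is scheduled more than once in the pass list.  */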
  virtual bool gate (function *fun)
    {
      return fun->eh != NULL && fun->eh->region_tree != NULL;
    }

  virtual unsigned int execute (function *);

}; // class pass_cleanup_eh

unsigned int
pass_cleanup_eh::execute (function *fun)
{
  int ret = execute_cleanup_eh_1 ();

  /* If the function no longer needs an EH personality routine,
     clear it.  This exposes cross-language inlining opportunities
     and avoids references to a never-defined personality routine.  */
  if (DECL_FUNCTION_PERSONALITY (current_function_decl)
      && function_needs_eh_personality (fun) != eh_personality_lang)
    DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;

  return ret;
}

} // anon namespace

gimple_opt_pass *
make_pass_cleanup_eh (gcc::context *ctxt)
{
  return new pass_cleanup_eh (ctxt);
}
/* Verify that BB, which contains STMT as its last statement, has
   precisely the edges that make_eh_edges would create.  */
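/* Like the other EH verifiers here, this returns true if a problem was
   found and reported, and false if the edges check out.  */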
DEBUG_FUNCTION bool
verify_eh_edges (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  eh_landing_pad lp = NULL;
  int lp_nr;
  edge_iterator ei;
  edge e, eh_edge;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr > 0)
    lp = get_eh_landing_pad_from_number (lp_nr);

  eh_edge = NULL;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->flags & EDGE_EH)
        {
          if (eh_edge)
            {
              error ("BB %i has multiple EH edges", bb->index);
              return true;
            }
          else
            eh_edge = e;
        }
    }

  if (lp == NULL)
    {
      if (eh_edge)
        {
          error ("BB %i cannot throw but has an EH edge", bb->index);
          return true;
        }
      return false;
    }

  if (!stmt_could_throw_p (stmt))
    {
      error ("BB %i last statement has incorrectly set lp", bb->index);
      return true;
    }

  if (eh_edge == NULL)
    {
      error ("BB %i is missing an EH edge", bb->index);
      return true;
    }

  if (eh_edge->dest != label_to_block (lp->post_landing_pad))
    {
      error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
      return true;
    }

  return false;
}
/* Similarly, but handle GIMPLE_EH_DISPATCH specifically.  */
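/* For illustration only (made-up source): given

     try { ... }
     catch (int) { ... }
     catch (...) { ... }

   the EH_DISPATCH block should have one edge per catch label, and the
   catch-all clause ends the search, so no fallthru edge is wanted.
   Without a catch-all, a fallthru edge carries the unmatched case on
   to the enclosing region.  */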
DEBUG_FUNCTION bool
verify_eh_dispatch_edge (gimple stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;
  bool want_fallthru = true;
  edge_iterator ei;
  edge e, fall_edge;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  FOR_EACH_EDGE (e, ei, src->succs)
    gcc_assert (e->aux == NULL);

  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
        {
          dst = label_to_block (c->label);
          e = find_edge (src, dst);
          if (e == NULL)
            {
              error ("BB %i is missing an edge", src->index);
              return true;
            }
          e->aux = (void *)e;

          /* A catch-all handler doesn't have a fallthru.  */
          if (c->type_list == NULL)
            {
              want_fallthru = false;
              break;
            }
        }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (r->u.allowed.label);
      e = find_edge (src, dst);
      if (e == NULL)
        {
          error ("BB %i is missing an edge", src->index);
          return true;
        }
      e->aux = (void *)e;
      break;

    default:
      gcc_unreachable ();
    }

  fall_edge = NULL;
  FOR_EACH_EDGE (e, ei, src->succs)
    {
      if (e->flags & EDGE_FALLTHRU)
        {
          if (fall_edge != NULL)
            {
              error ("BB %i has too many fallthru edges", src->index);
              return true;
            }
          fall_edge = e;
        }
      else if (e->aux)
        e->aux = NULL;
      else
        {
          error ("BB %i has incorrect edge", src->index);
          return true;
        }
    }

  if ((fall_edge != NULL) ^ want_fallthru)
    {
      error ("BB %i has incorrect fallthru edge", src->index);
      return true;
    }

  return false;
}