gcc/tree-eh.c
1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "except.h"
29 #include "pointer-set.h"
30 #include "tree-flow.h"
31 #include "tree-inline.h"
32 #include "tree-pass.h"
33 #include "langhooks.h"
34 #include "ggc.h"
35 #include "diagnostic-core.h"
36 #include "gimple.h"
37 #include "target.h"
38 #include "cfgloop.h"
40 /* In some instances a tree and a gimple need to be stored in the same
41 table, i.e. in hash tables.  This is the structure used to do that. */
42 typedef union {tree *tp; tree t; gimple g;} treemple;
44 /* Nonzero if we are using EH to handle cleanups. */
45 static int using_eh_for_cleanups_p = 0;
47 void
48 using_eh_for_cleanups (void)
50 using_eh_for_cleanups_p = 1;
53 /* Misc functions used in this file. */
55 /* Remember and lookup EH landing pad data for arbitrary statements.
56 Really this means any statement that could_throw_p. We could
57 stuff this information into the stmt_ann data structure, but:
59 (1) We absolutely rely on this information being kept until
60 we get to rtl. Once we're done with lowering here, if we lose
61 the information there's no way to recover it!
63 (2) There are many more statements that *cannot* throw as
64 compared to those that can. We should be saving some amount
65 of space by only allocating memory for those that can throw. */
67 /* Add statement T in function IFUN to landing pad NUM. */
69 void
70 add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
72 struct throw_stmt_node *n;
73 void **slot;
75 gcc_assert (num != 0);
77 n = ggc_alloc_throw_stmt_node ();
78 n->stmt = t;
79 n->lp_nr = num;
81 if (!get_eh_throw_stmt_table (ifun))
82 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
83 struct_ptr_eq,
84 ggc_free));
86 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
87 gcc_assert (!*slot);
88 *slot = n;
91 /* Add statement T in the current function (cfun) to EH landing pad NUM. */
93 void
94 add_stmt_to_eh_lp (gimple t, int num)
96 add_stmt_to_eh_lp_fn (cfun, t, num);
99 /* Add statement T to the single EH landing pad in REGION. */
101 static void
102 record_stmt_eh_region (eh_region region, gimple t)
104 if (region == NULL)
105 return;
106 if (region->type == ERT_MUST_NOT_THROW)
107 add_stmt_to_eh_lp_fn (cfun, t, -region->index);
108 else
110 eh_landing_pad lp = region->landing_pads;
111 if (lp == NULL)
112 lp = gen_eh_landing_pad (region);
113 else
114 gcc_assert (lp->next_lp == NULL);
115 add_stmt_to_eh_lp_fn (cfun, t, lp->index);
120 /* Remove statement T in function IFUN from its EH landing pad. */
122 bool
123 remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
125 struct throw_stmt_node dummy;
126 void **slot;
128 if (!get_eh_throw_stmt_table (ifun))
129 return false;
131 dummy.stmt = t;
132 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
133 NO_INSERT);
134 if (slot)
136 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
137 return true;
139 else
140 return false;
144 /* Remove statement T in the current function (cfun) from its
145 EH landing pad. */
147 bool
148 remove_stmt_from_eh_lp (gimple t)
150 return remove_stmt_from_eh_lp_fn (cfun, t);
153 /* Determine if statement T is inside an EH region in function IFUN.
154 Positive numbers indicate a landing pad index; negative numbers
155 indicate a MUST_NOT_THROW region index; zero indicates that the
156 statement is not recorded in the region table. */
158 int
159 lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
161 struct throw_stmt_node *p, n;
163 if (ifun->eh->throw_stmt_table == NULL)
164 return 0;
166 n.stmt = t;
167 p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
168 return p ? p->lp_nr : 0;
171 /* Likewise, but always use the current function. */
173 int
174 lookup_stmt_eh_lp (gimple t)
176 /* We can get called from initialized data when -fnon-call-exceptions
177 is on; prevent crash. */
178 if (!cfun)
179 return 0;
180 return lookup_stmt_eh_lp_fn (cfun, t);
183 /* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
184 nodes and LABEL_DECL nodes. We will use this during the second phase to
185 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
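/* For illustration, a rough sketch (the labels and nesting below are
   hypothetical, not taken from a particular testcase):

	try {				-- TF1
	  goto L;
	  try {				-- TF2, recorded with parent TF1
	    L: ;			-- L recorded with parent TF2
	  } finally { ... }
	} finally { ... }

   Asking whether the goto leaves TF1's body walks L -> TF2 -> TF1 and
   answers no; a label that is not recorded anywhere under TF1 makes
   the walk fall off the table first and the goto is treated as
   escaping.  */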
187 struct finally_tree_node
189 /* When storing a GIMPLE_TRY, we have to record a gimple. However
190 when deciding whether a GOTO to a certain LABEL_DECL (which is a
191 tree) leaves the TRY block, it's necessary to record a tree in
192 this field. Thus a treemple is used. */
193 treemple child;
194 gimple parent;
197 /* Note that this table is *not* marked GTY. It is short-lived. */
198 static htab_t finally_tree;
200 static void
201 record_in_finally_tree (treemple child, gimple parent)
203 struct finally_tree_node *n;
204 void **slot;
206 n = XNEW (struct finally_tree_node);
207 n->child = child;
208 n->parent = parent;
210 slot = htab_find_slot (finally_tree, n, INSERT);
211 gcc_assert (!*slot);
212 *slot = n;
215 static void
216 collect_finally_tree (gimple stmt, gimple region);
218 /* Go through the gimple sequence. Works with collect_finally_tree to
219 record all GIMPLE_LABEL and GIMPLE_TRY statements. */
221 static void
222 collect_finally_tree_1 (gimple_seq seq, gimple region)
224 gimple_stmt_iterator gsi;
226 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
227 collect_finally_tree (gsi_stmt (gsi), region);
230 static void
231 collect_finally_tree (gimple stmt, gimple region)
233 treemple temp;
235 switch (gimple_code (stmt))
237 case GIMPLE_LABEL:
238 temp.t = gimple_label_label (stmt);
239 record_in_finally_tree (temp, region);
240 break;
242 case GIMPLE_TRY:
243 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
245 temp.g = stmt;
246 record_in_finally_tree (temp, region);
247 collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
248 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
250 else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
252 collect_finally_tree_1 (gimple_try_eval (stmt), region);
253 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
255 break;
257 case GIMPLE_CATCH:
258 collect_finally_tree_1 (gimple_catch_handler (stmt), region);
259 break;
261 case GIMPLE_EH_FILTER:
262 collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
263 break;
265 case GIMPLE_EH_ELSE:
266 collect_finally_tree_1 (gimple_eh_else_n_body (stmt), region);
267 collect_finally_tree_1 (gimple_eh_else_e_body (stmt), region);
268 break;
270 default:
271 /* A type, a decl, or some kind of statement that we're not
272 interested in. Don't walk them. */
273 break;
278 /* Use the finally tree to determine if a jump from START to TARGET
279 would leave the try_finally node that START lives in. */
281 static bool
282 outside_finally_tree (treemple start, gimple target)
284 struct finally_tree_node n, *p;
286 do
287 {
288 n.child = start;
289 p = (struct finally_tree_node *) htab_find (finally_tree, &n);
290 if (!p)
291 return true;
292 start.g = p->parent;
293 }
294 while (start.g != target);
296 return false;
299 /* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY
300 nodes into a set of gotos, magic labels, and eh regions.
301 The eh region creation is straight-forward, but frobbing all the gotos
302 and such into shape isn't. */
304 /* The sequence into which we record all EH stuff. This will be
305 placed at the end of the function when we're all done. */
306 static gimple_seq eh_seq;
308 /* Record whether an EH region contains something that can throw,
309 indexed by EH region number. */
310 static bitmap eh_region_may_contain_throw_map;
312 /* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
313 statements that are seen to escape this GIMPLE_TRY_FINALLY node.
314 The idea is to record a gimple statement for everything except for
315 the conditionals, which get their labels recorded. Since labels are
316 of type 'tree', we need this node to store both gimple and tree
317 objects. REPL_STMT is the sequence used to replace the goto/return
318 statement. CONT_STMT is used to store the statement that allows
319 the return/goto to jump to the original destination. */
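/* Roughly, for a "return x;" that escapes a try/finally: REPL_STMT
   later becomes something like "finally_tmp = N; goto finally_label;"
   (or just the goto, for the simpler lowering strategies below), while
   CONT_STMT keeps the original "return x;" so it can be re-emitted
   after a copy of the finally code.  finally_tmp and finally_label
   refer to the artificial temporary and label created later in this
   file.  */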
321 struct goto_queue_node
323 treemple stmt;
324 location_t location;
325 gimple_seq repl_stmt;
326 gimple cont_stmt;
327 int index;
328 /* This is used when index >= 0 to indicate that stmt is a label (as
329 opposed to a goto stmt). */
330 int is_label;
333 /* State of the world while lowering. */
335 struct leh_state
337 /* What's "current" while constructing the eh region tree. These
338 correspond to variables of the same name in cfun->eh, which we
339 don't have easy access to. */
340 eh_region cur_region;
342 /* What's "current" for the purposes of __builtin_eh_pointer. For
343 a CATCH, this is the associated TRY. For an EH_FILTER, this is
344 the associated ALLOWED_EXCEPTIONS, etc. */
345 eh_region ehp_region;
347 /* Processing of TRY_FINALLY requires a bit more state. This is
348 split out into a separate structure so that we don't have to
349 copy so much when processing other nodes. */
350 struct leh_tf_state *tf;
353 struct leh_tf_state
355 /* Pointer to the GIMPLE_TRY_FINALLY node under discussion. The
356 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain
357 this so that outside_finally_tree can reliably reference the tree used
358 in the collect_finally_tree data structures. */
359 gimple try_finally_expr;
360 gimple top_p;
362 /* While top_p is being lowered it is usually expanded into multiple
363 statements; the following field stores them. */
364 gimple_seq top_p_seq;
366 /* The state outside this try_finally node. */
367 struct leh_state *outer;
369 /* The exception region created for it. */
370 eh_region region;
372 /* The goto queue. */
373 struct goto_queue_node *goto_queue;
374 size_t goto_queue_size;
375 size_t goto_queue_active;
377 /* Pointer map to help in searching goto_queue when it is large. */
378 struct pointer_map_t *goto_queue_map;
380 /* The set of unique labels seen as entries in the goto queue. */
381 VEC(tree,heap) *dest_array;
383 /* A label to be added at the end of the completed transformed
384 sequence. It will be set if may_fallthru was true *at one time*,
385 though subsequent transformations may have cleared that flag. */
386 tree fallthru_label;
388 /* True if it is possible to fall out the bottom of the try block.
389 Cleared if the fallthru is converted to a goto. */
390 bool may_fallthru;
392 /* True if any entry in goto_queue is a GIMPLE_RETURN. */
393 bool may_return;
395 /* True if the finally block can receive an exception edge.
396 Cleared if the exception case is handled by code duplication. */
397 bool may_throw;
400 static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);
402 /* Search for STMT in the goto queue. Return the replacement,
403 or null if the statement isn't in the queue. */
405 #define LARGE_GOTO_QUEUE 20
407 static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);
409 static gimple_seq
410 find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
412 unsigned int i;
413 void **slot;
415 if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
417 for (i = 0; i < tf->goto_queue_active; i++)
418 if (tf->goto_queue[i].stmt.g == stmt.g)
419 return tf->goto_queue[i].repl_stmt;
420 return NULL;
423 /* If we have a large number of entries in the goto_queue, create a
424 pointer map and use that for searching. */
426 if (!tf->goto_queue_map)
428 tf->goto_queue_map = pointer_map_create ();
429 for (i = 0; i < tf->goto_queue_active; i++)
431 slot = pointer_map_insert (tf->goto_queue_map,
432 tf->goto_queue[i].stmt.g);
433 gcc_assert (*slot == NULL);
434 *slot = &tf->goto_queue[i];
438 slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
439 if (slot != NULL)
440 return (((struct goto_queue_node *) *slot)->repl_stmt);
442 return NULL;
445 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
446 lowered GIMPLE_COND. If, by chance, the replacement is a simple goto,
447 then we can just splat it in, otherwise we add the new stmts immediately
448 after the GIMPLE_COND and redirect. */
450 static void
451 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
452 gimple_stmt_iterator *gsi)
454 tree label;
455 gimple_seq new_seq;
456 treemple temp;
457 location_t loc = gimple_location (gsi_stmt (*gsi));
459 temp.tp = tp;
460 new_seq = find_goto_replacement (tf, temp);
461 if (!new_seq)
462 return;
464 if (gimple_seq_singleton_p (new_seq)
465 && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
467 *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
468 return;
471 label = create_artificial_label (loc);
472 /* Set the new label for the GIMPLE_COND */
473 *tp = label;
475 gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
476 gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
479 /* The real work of replace_goto_queue. Returns with GSI updated to
480 point to the next statement. */
482 static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);
484 static void
485 replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
486 gimple_stmt_iterator *gsi)
488 gimple_seq seq;
489 treemple temp;
490 temp.g = NULL;
492 switch (gimple_code (stmt))
494 case GIMPLE_GOTO:
495 case GIMPLE_RETURN:
496 temp.g = stmt;
497 seq = find_goto_replacement (tf, temp);
498 if (seq)
500 gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
501 gsi_remove (gsi, false);
502 return;
504 break;
506 case GIMPLE_COND:
507 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
508 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
509 break;
511 case GIMPLE_TRY:
512 replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
513 replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
514 break;
515 case GIMPLE_CATCH:
516 replace_goto_queue_stmt_list (gimple_catch_handler_ptr (stmt), tf);
517 break;
518 case GIMPLE_EH_FILTER:
519 replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
520 break;
521 case GIMPLE_EH_ELSE:
522 replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (stmt), tf);
523 replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (stmt), tf);
524 break;
526 default:
527 /* These won't have gotos in them. */
528 break;
531 gsi_next (gsi);
534 /* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */
536 static void
537 replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
539 gimple_stmt_iterator gsi = gsi_start (*seq);
541 while (!gsi_end_p (gsi))
542 replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
545 /* Replace all goto queue members. */
547 static void
548 replace_goto_queue (struct leh_tf_state *tf)
550 if (tf->goto_queue_active == 0)
551 return;
552 replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
553 replace_goto_queue_stmt_list (&eh_seq, tf);
556 /* Add a new record to the goto queue contained in TF. NEW_STMT is the
557 data to be added, IS_LABEL indicates whether NEW_STMT is a label or
558 a gimple return. */
560 static void
561 record_in_goto_queue (struct leh_tf_state *tf,
562 treemple new_stmt,
563 int index,
564 bool is_label,
565 location_t location)
567 size_t active, size;
568 struct goto_queue_node *q;
570 gcc_assert (!tf->goto_queue_map);
572 active = tf->goto_queue_active;
573 size = tf->goto_queue_size;
574 if (active >= size)
576 size = (size ? size * 2 : 32);
577 tf->goto_queue_size = size;
578 tf->goto_queue
579 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
582 q = &tf->goto_queue[active];
583 tf->goto_queue_active = active + 1;
585 memset (q, 0, sizeof (*q));
586 q->stmt = new_stmt;
587 q->index = index;
588 q->location = location;
589 q->is_label = is_label;
592 /* Record the LABEL label in the goto queue contained in TF.
593 TF is not null. */
595 static void
596 record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
597 location_t location)
599 int index;
600 treemple temp, new_stmt;
602 if (!label)
603 return;
605 /* Computed and non-local gotos do not get processed. Given
606 their nature we can neither tell whether we've escaped the
607 finally block nor redirect them if we knew. */
608 if (TREE_CODE (label) != LABEL_DECL)
609 return;
611 /* No need to record gotos that don't leave the try block. */
612 temp.t = label;
613 if (!outside_finally_tree (temp, tf->try_finally_expr))
614 return;
616 if (! tf->dest_array)
618 tf->dest_array = VEC_alloc (tree, heap, 10);
619 VEC_quick_push (tree, tf->dest_array, label);
620 index = 0;
622 else
624 int n = VEC_length (tree, tf->dest_array);
625 for (index = 0; index < n; ++index)
626 if (VEC_index (tree, tf->dest_array, index) == label)
627 break;
628 if (index == n)
629 VEC_safe_push (tree, heap, tf->dest_array, label);
632 /* In the case of a GOTO we want to record the destination label,
633 since with a GIMPLE_COND we have easy access to the then/else
634 labels. */
635 new_stmt = stmt;
636 record_in_goto_queue (tf, new_stmt, index, true, location);
639 /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
640 node, and if so record that fact in the goto queue associated with that
641 try_finally node. */
643 static void
644 maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
646 struct leh_tf_state *tf = state->tf;
647 treemple new_stmt;
649 if (!tf)
650 return;
652 switch (gimple_code (stmt))
654 case GIMPLE_COND:
655 new_stmt.tp = gimple_op_ptr (stmt, 2);
656 record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt),
657 EXPR_LOCATION (*new_stmt.tp));
658 new_stmt.tp = gimple_op_ptr (stmt, 3);
659 record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt),
660 EXPR_LOCATION (*new_stmt.tp));
661 break;
662 case GIMPLE_GOTO:
663 new_stmt.g = stmt;
664 record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
665 gimple_location (stmt));
666 break;
668 case GIMPLE_RETURN:
669 tf->may_return = true;
670 new_stmt.g = stmt;
671 record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
672 break;
674 default:
675 gcc_unreachable ();
680 #ifdef ENABLE_CHECKING
681 /* We do not process GIMPLE_SWITCHes for now. As long as the original source
682 was in fact structured, and we've not yet done jump threading, then none
683 of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */
685 static void
686 verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
688 struct leh_tf_state *tf = state->tf;
689 size_t i, n;
691 if (!tf)
692 return;
694 n = gimple_switch_num_labels (switch_expr);
696 for (i = 0; i < n; ++i)
698 treemple temp;
699 tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
700 temp.t = lab;
701 gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
704 #else
705 #define verify_norecord_switch_expr(state, switch_expr)
706 #endif
708 /* Redirect a RETURN_EXPR pointed to by Q to FINLAB. If MOD is
709 non-null, insert it before the new branch. */
711 static void
712 do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
714 gimple x;
716 /* In the case of a return, the queue node must be a gimple statement. */
717 gcc_assert (!q->is_label);
719 /* Note that the return value may have already been computed, e.g.,
721 int x;
722 int foo (void)
723 {
724 x = 0;
725 try {
726 return x;
727 } finally {
728 x++;
729 }
730 }
732 should return 0, not 1. We don't have to do anything to make
733 this happen because the return value has been placed in the
734 RESULT_DECL already. */
736 q->cont_stmt = q->stmt.g;
738 if (mod)
739 gimple_seq_add_seq (&q->repl_stmt, mod);
741 x = gimple_build_goto (finlab);
742 gimple_seq_add_stmt (&q->repl_stmt, x);
745 /* Similar, but easier, for GIMPLE_GOTO. */
747 static void
748 do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
749 struct leh_tf_state *tf)
751 gimple x;
753 gcc_assert (q->is_label);
755 q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));
757 if (mod)
758 gimple_seq_add_seq (&q->repl_stmt, mod);
760 x = gimple_build_goto (finlab);
761 gimple_seq_add_stmt (&q->repl_stmt, x);
764 /* Emit a standard landing pad sequence into SEQ for REGION. */
766 static void
767 emit_post_landing_pad (gimple_seq *seq, eh_region region)
769 eh_landing_pad lp = region->landing_pads;
770 gimple x;
772 if (lp == NULL)
773 lp = gen_eh_landing_pad (region);
775 lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
776 EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;
778 x = gimple_build_label (lp->post_landing_pad);
779 gimple_seq_add_stmt (seq, x);
782 /* Emit a RESX statement into SEQ for REGION. */
784 static void
785 emit_resx (gimple_seq *seq, eh_region region)
787 gimple x = gimple_build_resx (region->index);
788 gimple_seq_add_stmt (seq, x);
789 if (region->outer)
790 record_stmt_eh_region (region->outer, x);
793 /* Emit an EH_DISPATCH statement into SEQ for REGION. */
795 static void
796 emit_eh_dispatch (gimple_seq *seq, eh_region region)
798 gimple x = gimple_build_eh_dispatch (region->index);
799 gimple_seq_add_stmt (seq, x);
802 /* Note that the current EH region may contain a throw, or a
803 call to a function which itself may contain a throw. */
805 static void
806 note_eh_region_may_contain_throw (eh_region region)
808 while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
810 if (region->type == ERT_MUST_NOT_THROW)
811 break;
812 region = region->outer;
813 if (region == NULL)
814 break;
818 /* Check if REGION has been marked as containing a throw. If REGION is
819 NULL, this predicate is false. */
821 static inline bool
822 eh_region_may_contain_throw (eh_region r)
824 return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
827 /* We want to transform
828 try { body; } catch { stuff; }
830 normal_sequence:
831 body;
832 over:
833 eh_sequence:
834 landing_pad:
835 stuff;
836 goto over;
838 TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad
839 should be placed before the second operand, or NULL. OVER is
840 an existing label that should be put at the exit, or NULL. */
842 static gimple_seq
843 frob_into_branch_around (gimple tp, eh_region region, tree over)
845 gimple x;
846 gimple_seq cleanup, result;
847 location_t loc = gimple_location (tp);
849 cleanup = gimple_try_cleanup (tp);
850 result = gimple_try_eval (tp);
852 if (region)
853 emit_post_landing_pad (&eh_seq, region);
855 if (gimple_seq_may_fallthru (cleanup))
857 if (!over)
858 over = create_artificial_label (loc);
859 x = gimple_build_goto (over);
860 gimple_seq_add_stmt (&cleanup, x);
862 gimple_seq_add_seq (&eh_seq, cleanup);
864 if (over)
866 x = gimple_build_label (over);
867 gimple_seq_add_stmt (&result, x);
869 return result;
872 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
873 Make sure to record all new labels found. */
875 static gimple_seq
876 lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
877 location_t loc)
879 gimple region = NULL;
880 gimple_seq new_seq;
881 gimple_stmt_iterator gsi;
883 new_seq = copy_gimple_seq_and_replace_locals (seq);
885 for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
887 gimple stmt = gsi_stmt (gsi);
888 if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
890 tree block = gimple_block (stmt);
891 gimple_set_location (stmt, loc);
892 gimple_set_block (stmt, block);
896 if (outer_state->tf)
897 region = outer_state->tf->try_finally_expr;
898 collect_finally_tree_1 (new_seq, region);
900 return new_seq;
903 /* A subroutine of lower_try_finally. Create a fallthru label for
904 the given try_finally state. The only tricky bit here is that
905 we have to make sure to record the label in our outer context. */
907 static tree
908 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
910 tree label = tf->fallthru_label;
911 treemple temp;
913 if (!label)
915 label = create_artificial_label (gimple_location (tf->try_finally_expr));
916 tf->fallthru_label = label;
917 if (tf->outer->tf)
919 temp.t = label;
920 record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
923 return label;
926 /* A subroutine of lower_try_finally. If FINALLY consists of a
927 GIMPLE_EH_ELSE node, return it. */
929 static inline gimple
930 get_eh_else (gimple_seq finally)
932 gimple x = gimple_seq_first_stmt (finally);
933 if (gimple_code (x) == GIMPLE_EH_ELSE)
935 gcc_assert (gimple_seq_singleton_p (finally));
936 return x;
938 return NULL;
941 /* A subroutine of lower_try_finally. If the eh_protect_cleanup_actions
942 langhook returns non-null, then the language requires that the exception
943 path out of a try_finally be treated specially. To wit: the code within
944 the finally block may not itself throw an exception. We have two choices
945 here. First we can duplicate the finally block and wrap it in a
946 must_not_throw region. Second, we can generate code like
948 try {
949 finally_block;
950 } catch {
951 if (fintmp == eh_edge)
952 protect_cleanup_actions;
953 }
955 where "fintmp" is the temporary used in the switch statement generation
956 alternative considered below. For the nonce, we always choose the first
957 option.
959 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
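/* Sketch of the first option as implemented below (simplified): the
   normal-path finally block F stays in place, while a duplicate F' is
   wrapped roughly as

	try { F' } catch { MUST_NOT_THROW <protect_cleanup_actions> }

   lowered via lower_eh_must_not_throw, and appended to eh_seq behind
   the region's post landing pad, followed by a RESX if F' can fall
   through.  */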
961 static void
962 honor_protect_cleanup_actions (struct leh_state *outer_state,
963 struct leh_state *this_state,
964 struct leh_tf_state *tf)
966 tree protect_cleanup_actions;
967 gimple_stmt_iterator gsi;
968 bool finally_may_fallthru;
969 gimple_seq finally;
970 gimple x, eh_else;
972 /* First check for nothing to do. */
973 if (lang_hooks.eh_protect_cleanup_actions == NULL)
974 return;
975 protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
976 if (protect_cleanup_actions == NULL)
977 return;
979 finally = gimple_try_cleanup (tf->top_p);
980 eh_else = get_eh_else (finally);
982 /* Duplicate the FINALLY block. Only need to do this for try-finally,
983 and not for cleanups. If we've got an EH_ELSE, extract it now. */
984 if (eh_else)
986 finally = gimple_eh_else_e_body (eh_else);
987 gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
989 else if (this_state)
990 finally = lower_try_finally_dup_block (finally, outer_state,
991 gimple_location (tf->try_finally_expr));
992 finally_may_fallthru = gimple_seq_may_fallthru (finally);
994 /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
995 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
996 to be in an enclosing scope, but needs to be implemented at this level
997 to avoid a nesting violation (see wrap_temporary_cleanups in
998 cp/decl.c). Since it's logically at an outer level, we should call
999 terminate before we get to it, so strip it away before adding the
1000 MUST_NOT_THROW filter. */
1001 gsi = gsi_start (finally);
1002 x = gsi_stmt (gsi);
1003 if (gimple_code (x) == GIMPLE_TRY
1004 && gimple_try_kind (x) == GIMPLE_TRY_CATCH
1005 && gimple_try_catch_is_cleanup (x))
1007 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
1008 gsi_remove (&gsi, false);
1011 /* Wrap the block with protect_cleanup_actions as the action. */
1012 x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
1013 x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
1014 GIMPLE_TRY_CATCH);
1015 finally = lower_eh_must_not_throw (outer_state, x);
1017 /* Drop all of this into the exception sequence. */
1018 emit_post_landing_pad (&eh_seq, tf->region);
1019 gimple_seq_add_seq (&eh_seq, finally);
1020 if (finally_may_fallthru)
1021 emit_resx (&eh_seq, tf->region);
1023 /* Having now been handled, EH isn't to be considered with
1024 the rest of the outgoing edges. */
1025 tf->may_throw = false;
1028 /* A subroutine of lower_try_finally. We have determined that there is
1029 no fallthru edge out of the finally block. This means that there is
1030 no outgoing edge corresponding to any incoming edge. Restructure the
1031 try_finally node for this special case. */
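/* Shape of the result, roughly (LAB is the artificial label created
   below):

	<try body>
     LAB:
	<finally>		-- does not fall through, so nothing follows

   Every escaping goto/return is redirected to LAB; if the body can
   throw, the landing pad emitted into eh_seq either holds the EH_ELSE
   exception path or simply branches to LAB.  */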
1033 static void
1034 lower_try_finally_nofallthru (struct leh_state *state,
1035 struct leh_tf_state *tf)
1037 tree lab;
1038 gimple x, eh_else;
1039 gimple_seq finally;
1040 struct goto_queue_node *q, *qe;
1042 lab = create_artificial_label (gimple_location (tf->try_finally_expr));
1044 /* We expect that tf->top_p is a GIMPLE_TRY. */
1045 finally = gimple_try_cleanup (tf->top_p);
1046 tf->top_p_seq = gimple_try_eval (tf->top_p);
1048 x = gimple_build_label (lab);
1049 gimple_seq_add_stmt (&tf->top_p_seq, x);
1051 q = tf->goto_queue;
1052 qe = q + tf->goto_queue_active;
1053 for (; q < qe; ++q)
1054 if (q->index < 0)
1055 do_return_redirection (q, lab, NULL);
1056 else
1057 do_goto_redirection (q, lab, NULL, tf);
1059 replace_goto_queue (tf);
1061 /* Emit the finally block into the stream. Lower EH_ELSE at this time. */
1062 eh_else = get_eh_else (finally);
1063 if (eh_else)
1065 finally = gimple_eh_else_n_body (eh_else);
1066 lower_eh_constructs_1 (state, &finally);
1067 gimple_seq_add_seq (&tf->top_p_seq, finally);
1069 if (tf->may_throw)
1071 finally = gimple_eh_else_e_body (eh_else);
1072 lower_eh_constructs_1 (state, &finally);
1074 emit_post_landing_pad (&eh_seq, tf->region);
1075 gimple_seq_add_seq (&eh_seq, finally);
1078 else
1080 lower_eh_constructs_1 (state, &finally);
1081 gimple_seq_add_seq (&tf->top_p_seq, finally);
1083 if (tf->may_throw)
1085 emit_post_landing_pad (&eh_seq, tf->region);
1087 x = gimple_build_goto (lab);
1088 gimple_seq_add_stmt (&eh_seq, x);
1093 /* A subroutine of lower_try_finally. We have determined that there is
1094 exactly one destination of the finally block. Restructure the
1095 try_finally node for this special case. */
1097 static void
1098 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
1100 struct goto_queue_node *q, *qe;
1101 gimple x;
1102 gimple_seq finally;
1103 gimple_stmt_iterator gsi;
1104 tree finally_label;
1105 location_t loc = gimple_location (tf->try_finally_expr);
1107 finally = gimple_try_cleanup (tf->top_p);
1108 tf->top_p_seq = gimple_try_eval (tf->top_p);
1110 /* Since there's only one destination, and the destination edge can only
1111 either be EH or non-EH, that implies that all of our incoming edges
1112 are of the same type. Therefore we can lower EH_ELSE immediately. */
1113 x = get_eh_else (finally);
1114 if (x)
1116 if (tf->may_throw)
1117 finally = gimple_eh_else_e_body (x);
1118 else
1119 finally = gimple_eh_else_n_body (x);
1122 lower_eh_constructs_1 (state, &finally);
1124 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1126 gimple stmt = gsi_stmt (gsi);
1127 if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
1129 tree block = gimple_block (stmt);
1130 gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
1131 gimple_set_block (stmt, block);
1135 if (tf->may_throw)
1137 /* Only reachable via the exception edge. Add the given label to
1138 the head of the FINALLY block. Append a RESX at the end. */
1139 emit_post_landing_pad (&eh_seq, tf->region);
1140 gimple_seq_add_seq (&eh_seq, finally);
1141 emit_resx (&eh_seq, tf->region);
1142 return;
1145 if (tf->may_fallthru)
1147 /* Only reachable via the fallthru edge. Do nothing but let
1148 the two blocks run together; we'll fall out the bottom. */
1149 gimple_seq_add_seq (&tf->top_p_seq, finally);
1150 return;
1153 finally_label = create_artificial_label (loc);
1154 x = gimple_build_label (finally_label);
1155 gimple_seq_add_stmt (&tf->top_p_seq, x);
1157 gimple_seq_add_seq (&tf->top_p_seq, finally);
1159 q = tf->goto_queue;
1160 qe = q + tf->goto_queue_active;
1162 if (tf->may_return)
1164 /* Reachable by return expressions only. Redirect them. */
1165 for (; q < qe; ++q)
1166 do_return_redirection (q, finally_label, NULL);
1167 replace_goto_queue (tf);
1169 else
1171 /* Reachable by goto expressions only. Redirect them. */
1172 for (; q < qe; ++q)
1173 do_goto_redirection (q, finally_label, NULL, tf);
1174 replace_goto_queue (tf);
1176 if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
1178 /* Reachable by goto to fallthru label only. Redirect it
1179 to the new label (already created, sadly), and do not
1180 emit the final branch out, or the fallthru label. */
1181 tf->fallthru_label = NULL;
1182 return;
1186 /* Place the original return/goto to the original destination
1187 immediately after the finally block. */
1188 x = tf->goto_queue[0].cont_stmt;
1189 gimple_seq_add_stmt (&tf->top_p_seq, x);
1190 maybe_record_in_goto_queue (state, x);
1193 /* A subroutine of lower_try_finally. There are multiple edges incoming
1194 and outgoing from the finally block. Implement this by duplicating the
1195 finally block for every destination. */
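/* Roughly, each reachable destination gets its own copy of FINALLY:

	<try body>
	<finally copy>  goto <fallthru label>;	-- if the body falls through
     LD:
	<finally copy>  <original goto/return to D>;	-- one per destination

   plus, if the region can throw, one more copy (or the EH_ELSE
   exception path) placed in eh_seq and terminated by a RESX.  */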
1197 static void
1198 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1200 gimple_seq finally;
1201 gimple_seq new_stmt;
1202 gimple_seq seq;
1203 gimple x, eh_else;
1204 tree tmp;
1205 location_t tf_loc = gimple_location (tf->try_finally_expr);
1207 finally = gimple_try_cleanup (tf->top_p);
1209 /* Notice EH_ELSE, and simplify some of the remaining code
1210 by considering FINALLY to be the normal return path only. */
1211 eh_else = get_eh_else (finally);
1212 if (eh_else)
1213 finally = gimple_eh_else_n_body (eh_else);
1215 tf->top_p_seq = gimple_try_eval (tf->top_p);
1216 new_stmt = NULL;
1218 if (tf->may_fallthru)
1220 seq = lower_try_finally_dup_block (finally, state, tf_loc);
1221 lower_eh_constructs_1 (state, &seq);
1222 gimple_seq_add_seq (&new_stmt, seq);
1224 tmp = lower_try_finally_fallthru_label (tf);
1225 x = gimple_build_goto (tmp);
1226 gimple_seq_add_stmt (&new_stmt, x);
1229 if (tf->may_throw)
1231 /* We don't need to copy the EH path of EH_ELSE,
1232 since it is only emitted once. */
1233 if (eh_else)
1234 seq = gimple_eh_else_e_body (eh_else);
1235 else
1236 seq = lower_try_finally_dup_block (finally, state, tf_loc);
1237 lower_eh_constructs_1 (state, &seq);
1239 emit_post_landing_pad (&eh_seq, tf->region);
1240 gimple_seq_add_seq (&eh_seq, seq);
1241 emit_resx (&eh_seq, tf->region);
1244 if (tf->goto_queue)
1246 struct goto_queue_node *q, *qe;
1247 int return_index, index;
1248 struct labels_s
1250 struct goto_queue_node *q;
1251 tree label;
1252 } *labels;
1254 return_index = VEC_length (tree, tf->dest_array);
1255 labels = XCNEWVEC (struct labels_s, return_index + 1);
1257 q = tf->goto_queue;
1258 qe = q + tf->goto_queue_active;
1259 for (; q < qe; q++)
1261 index = q->index < 0 ? return_index : q->index;
1263 if (!labels[index].q)
1264 labels[index].q = q;
1267 for (index = 0; index < return_index + 1; index++)
1269 tree lab;
1271 q = labels[index].q;
1272 if (! q)
1273 continue;
1275 lab = labels[index].label
1276 = create_artificial_label (tf_loc);
1278 if (index == return_index)
1279 do_return_redirection (q, lab, NULL);
1280 else
1281 do_goto_redirection (q, lab, NULL, tf);
1283 x = gimple_build_label (lab);
1284 gimple_seq_add_stmt (&new_stmt, x);
1286 seq = lower_try_finally_dup_block (finally, state, q->location);
1287 lower_eh_constructs_1 (state, &seq);
1288 gimple_seq_add_seq (&new_stmt, seq);
1290 gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
1291 maybe_record_in_goto_queue (state, q->cont_stmt);
1294 for (q = tf->goto_queue; q < qe; q++)
1296 tree lab;
1298 index = q->index < 0 ? return_index : q->index;
1300 if (labels[index].q == q)
1301 continue;
1303 lab = labels[index].label;
1305 if (index == return_index)
1306 do_return_redirection (q, lab, NULL);
1307 else
1308 do_goto_redirection (q, lab, NULL, tf);
1311 replace_goto_queue (tf);
1312 free (labels);
1315 /* Need to link new stmts after running replace_goto_queue due
1316 to not wanting to process the same goto stmts twice. */
1317 gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
1320 /* A subroutine of lower_try_finally. There are multiple edges incoming
1321 and outgoing from the finally block. Implement this by instrumenting
1322 each incoming edge and creating a switch statement at the end of the
1323 finally block that branches to the appropriate destination. */
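/* Roughly (the index values are picked below and depend on which kinds
   of edges exist):

	<try body>
	finally_tmp = <fallthru index>;		-- falls through...
     finally_label:
	<finally>
	switch (finally_tmp)
	  case <fallthru index>: goto <fallthru label>;
	  case K:                <original goto/return for destination K>;

   Each escaping goto/return in the body is rewritten to
   "finally_tmp = K; goto finally_label;"; the exception edge, when
   present, enters through the landing pad in eh_seq and sets
   finally_tmp the same way.  */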
1325 static void
1326 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1328 struct goto_queue_node *q, *qe;
1329 tree finally_tmp, finally_label;
1330 int return_index, eh_index, fallthru_index;
1331 int nlabels, ndests, j, last_case_index;
1332 tree last_case;
1333 VEC (tree,heap) *case_label_vec;
1334 gimple_seq switch_body = NULL;
1335 gimple x, eh_else;
1336 tree tmp;
1337 gimple switch_stmt;
1338 gimple_seq finally;
1339 struct pointer_map_t *cont_map = NULL;
1340 /* The location of the TRY_FINALLY stmt. */
1341 location_t tf_loc = gimple_location (tf->try_finally_expr);
1342 /* The location of the finally block. */
1343 location_t finally_loc;
1345 finally = gimple_try_cleanup (tf->top_p);
1346 eh_else = get_eh_else (finally);
1348 /* Mash the TRY block to the head of the chain. */
1349 tf->top_p_seq = gimple_try_eval (tf->top_p);
1351 /* The location of the finally is either the last stmt in the finally
1352 block or the location of the TRY_FINALLY itself. */
1353 x = gimple_seq_last_stmt (finally);
1354 finally_loc = x ? gimple_location (x) : tf_loc;
1356 /* Lower the finally block itself. */
1357 lower_eh_constructs_1 (state, &finally);
1359 /* Prepare for switch statement generation. */
1360 nlabels = VEC_length (tree, tf->dest_array);
1361 return_index = nlabels;
1362 eh_index = return_index + tf->may_return;
1363 fallthru_index = eh_index + (tf->may_throw && !eh_else);
1364 ndests = fallthru_index + tf->may_fallthru;
1366 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1367 finally_label = create_artificial_label (finally_loc);
1369 /* We use VEC_quick_push on case_label_vec throughout this function,
1370 since we know the size in advance and allocate precisely as much
1371 space as needed. */
1372 case_label_vec = VEC_alloc (tree, heap, ndests);
1373 last_case = NULL;
1374 last_case_index = 0;
1376 /* Begin inserting code for getting to the finally block. Things
1377 are done in this order to correspond to the sequence in which the
1378 code is laid out. */
1380 if (tf->may_fallthru)
1382 x = gimple_build_assign (finally_tmp,
1383 build_int_cst (integer_type_node,
1384 fallthru_index));
1385 gimple_seq_add_stmt (&tf->top_p_seq, x);
1387 tmp = build_int_cst (integer_type_node, fallthru_index);
1388 last_case = build_case_label (tmp, NULL,
1389 create_artificial_label (tf_loc));
1390 VEC_quick_push (tree, case_label_vec, last_case);
1391 last_case_index++;
1393 x = gimple_build_label (CASE_LABEL (last_case));
1394 gimple_seq_add_stmt (&switch_body, x);
1396 tmp = lower_try_finally_fallthru_label (tf);
1397 x = gimple_build_goto (tmp);
1398 gimple_seq_add_stmt (&switch_body, x);
1401 /* For EH_ELSE, emit the exception path (plus resx) now, then
1402 subsequently we only need consider the normal path. */
1403 if (eh_else)
1405 if (tf->may_throw)
1407 finally = gimple_eh_else_e_body (eh_else);
1408 lower_eh_constructs_1 (state, &finally);
1410 emit_post_landing_pad (&eh_seq, tf->region);
1411 gimple_seq_add_seq (&eh_seq, finally);
1412 emit_resx (&eh_seq, tf->region);
1415 finally = gimple_eh_else_n_body (eh_else);
1417 else if (tf->may_throw)
1419 emit_post_landing_pad (&eh_seq, tf->region);
1421 x = gimple_build_assign (finally_tmp,
1422 build_int_cst (integer_type_node, eh_index));
1423 gimple_seq_add_stmt (&eh_seq, x);
1425 x = gimple_build_goto (finally_label);
1426 gimple_seq_add_stmt (&eh_seq, x);
1428 tmp = build_int_cst (integer_type_node, eh_index);
1429 last_case = build_case_label (tmp, NULL,
1430 create_artificial_label (tf_loc));
1431 VEC_quick_push (tree, case_label_vec, last_case);
1432 last_case_index++;
1434 x = gimple_build_label (CASE_LABEL (last_case));
1435 gimple_seq_add_stmt (&eh_seq, x);
1436 emit_resx (&eh_seq, tf->region);
1439 x = gimple_build_label (finally_label);
1440 gimple_seq_add_stmt (&tf->top_p_seq, x);
1442 gimple_seq_add_seq (&tf->top_p_seq, finally);
1444 /* Redirect each incoming goto edge. */
1445 q = tf->goto_queue;
1446 qe = q + tf->goto_queue_active;
1447 j = last_case_index + tf->may_return;
1448 /* Prepare the assignments to finally_tmp that are executed upon the
1449 entrance through a particular edge. */
1450 for (; q < qe; ++q)
1452 gimple_seq mod = NULL;
1453 int switch_id;
1454 unsigned int case_index;
1456 if (q->index < 0)
1458 x = gimple_build_assign (finally_tmp,
1459 build_int_cst (integer_type_node,
1460 return_index));
1461 gimple_seq_add_stmt (&mod, x);
1462 do_return_redirection (q, finally_label, mod);
1463 switch_id = return_index;
1465 else
1467 x = gimple_build_assign (finally_tmp,
1468 build_int_cst (integer_type_node, q->index));
1469 gimple_seq_add_stmt (&mod, x);
1470 do_goto_redirection (q, finally_label, mod, tf);
1471 switch_id = q->index;
1474 case_index = j + q->index;
1475 if (VEC_length (tree, case_label_vec) <= case_index
1476 || !VEC_index (tree, case_label_vec, case_index))
1478 tree case_lab;
1479 void **slot;
1480 tmp = build_int_cst (integer_type_node, switch_id);
1481 case_lab = build_case_label (tmp, NULL,
1482 create_artificial_label (tf_loc));
1483 /* We store the cont_stmt in the pointer map, so that we can recover
1484 it in the loop below. */
1485 if (!cont_map)
1486 cont_map = pointer_map_create ();
1487 slot = pointer_map_insert (cont_map, case_lab);
1488 *slot = q->cont_stmt;
1489 VEC_quick_push (tree, case_label_vec, case_lab);
1492 for (j = last_case_index; j < last_case_index + nlabels; j++)
1494 gimple cont_stmt;
1495 void **slot;
1497 last_case = VEC_index (tree, case_label_vec, j);
1499 gcc_assert (last_case);
1500 gcc_assert (cont_map);
1502 slot = pointer_map_contains (cont_map, last_case);
1503 gcc_assert (slot);
1504 cont_stmt = *(gimple *) slot;
1506 x = gimple_build_label (CASE_LABEL (last_case));
1507 gimple_seq_add_stmt (&switch_body, x);
1508 gimple_seq_add_stmt (&switch_body, cont_stmt);
1509 maybe_record_in_goto_queue (state, cont_stmt);
1511 if (cont_map)
1512 pointer_map_destroy (cont_map);
1514 replace_goto_queue (tf);
1516 /* Make sure that the last case is the default label, as one is required.
1517 Then sort the labels, which is also required in GIMPLE. */
1518 CASE_LOW (last_case) = NULL;
1519 sort_case_labels (case_label_vec);
1521 /* Build the switch statement, setting last_case to be the default
1522 label. */
1523 switch_stmt = gimple_build_switch (finally_tmp, last_case,
1524 case_label_vec);
1525 gimple_set_location (switch_stmt, finally_loc);
1527 /* Need to link SWITCH_STMT after running replace_goto_queue
1528 due to not wanting to process the same goto stmts twice. */
1529 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
1530 gimple_seq_add_seq (&tf->top_p_seq, switch_body);
1533 /* Decide whether or not we are going to duplicate the finally block.
1534 There are several considerations.
1536 First, if this is Java, then the finally block contains code
1537 written by the user. It has line numbers associated with it,
1538 so duplicating the block means it's difficult to set a breakpoint.
1539 Since controlling code generation via -g is verboten, we simply
1540 never duplicate code without optimization.
1542 Second, we'd like to prevent egregious code growth. One way to
1543 do this is to estimate the size of the finally block, multiply
1544 that by the number of copies we'd need to make, and compare against
1545 the estimate of the size of the switch machinery we'd have to add. */
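/* Worked example with made-up numbers: a finally block counted as 5
   size units with ndests == 3 gives f_estimate = (5 + 1) * 3 = 18 and
   sw_estimate = 10 + 2 * 3 = 16.  Optimizing the function for size
   picks the switch (18 < 16 is false), while -O2 (when not optimizing
   for size) still copies via the "f_estimate < 100" escape hatch
   below.  */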
1547 static bool
1548 decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
1550 int f_estimate, sw_estimate;
1551 gimple eh_else;
1553 /* If there's an EH_ELSE involved, the exception path is separate
1554 and really doesn't come into play for this computation. */
1555 eh_else = get_eh_else (finally);
1556 if (eh_else)
1558 ndests -= may_throw;
1559 finally = gimple_eh_else_n_body (eh_else);
1562 if (!optimize)
1564 gimple_stmt_iterator gsi;
1566 if (ndests == 1)
1567 return true;
1569 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1571 gimple stmt = gsi_stmt (gsi);
1572 if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
1573 return false;
1575 return true;
1578 /* Finally estimate N times, plus N gotos. */
1579 f_estimate = count_insns_seq (finally, &eni_size_weights);
1580 f_estimate = (f_estimate + 1) * ndests;
1582 /* Switch statement (cost 10), N variable assignments, N gotos. */
1583 sw_estimate = 10 + 2 * ndests;
1585 /* Optimize for size clearly wants our best guess. */
1586 if (optimize_function_for_size_p (cfun))
1587 return f_estimate < sw_estimate;
1589 /* ??? These numbers are completely made up so far. */
1590 if (optimize > 1)
1591 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1592 else
1593 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1596 /* REG is the enclosing region for a possible cleanup region, or the region
1597 itself. Returns TRUE if such a region would be unreachable.
1599 Cleanup regions within a must-not-throw region aren't actually reachable
1600 even if there are throwing stmts within them, because the personality
1601 routine will call terminate before unwinding. */
1603 static bool
1604 cleanup_is_dead_in (eh_region reg)
1606 while (reg && reg->type == ERT_CLEANUP)
1607 reg = reg->outer;
1608 return (reg && reg->type == ERT_MUST_NOT_THROW);
1611 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY nodes
1612 to a sequence of labels and blocks, plus the exception region trees
1613 that record all the magic. This is complicated by the need to
1614 arrange for the FINALLY block to be executed on all exits. */
1616 static gimple_seq
1617 lower_try_finally (struct leh_state *state, gimple tp)
1619 struct leh_tf_state this_tf;
1620 struct leh_state this_state;
1621 int ndests;
1622 gimple_seq old_eh_seq;
1624 /* Process the try block. */
1626 memset (&this_tf, 0, sizeof (this_tf));
1627 this_tf.try_finally_expr = tp;
1628 this_tf.top_p = tp;
1629 this_tf.outer = state;
1630 if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region))
1632 this_tf.region = gen_eh_region_cleanup (state->cur_region);
1633 this_state.cur_region = this_tf.region;
1635 else
1637 this_tf.region = NULL;
1638 this_state.cur_region = state->cur_region;
1641 this_state.ehp_region = state->ehp_region;
1642 this_state.tf = &this_tf;
1644 old_eh_seq = eh_seq;
1645 eh_seq = NULL;
1647 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1649 /* Determine if the try block is escaped through the bottom. */
1650 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1652 /* Determine if any exceptions are possible within the try block. */
1653 if (this_tf.region)
1654 this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
1655 if (this_tf.may_throw)
1656 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1658 /* Determine how many edges (still) reach the finally block. Or rather,
1659 how many destinations are reached by the finally block. Use this to
1660 determine how we process the finally block itself. */
1662 ndests = VEC_length (tree, this_tf.dest_array);
1663 ndests += this_tf.may_fallthru;
1664 ndests += this_tf.may_return;
1665 ndests += this_tf.may_throw;
1667 /* If the FINALLY block is not reachable, dike it out. */
1668 if (ndests == 0)
1670 gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
1671 gimple_try_set_cleanup (tp, NULL);
1673 /* If the finally block doesn't fall through, then any destination
1674 we might try to impose there isn't reached either. There may be
1675 some minor amount of cleanup and redirection still needed. */
1676 else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
1677 lower_try_finally_nofallthru (state, &this_tf);
1679 /* We can easily special-case redirection to a single destination. */
1680 else if (ndests == 1)
1681 lower_try_finally_onedest (state, &this_tf);
1682 else if (decide_copy_try_finally (ndests, this_tf.may_throw,
1683 gimple_try_cleanup (tp)))
1684 lower_try_finally_copy (state, &this_tf);
1685 else
1686 lower_try_finally_switch (state, &this_tf);
1688 /* If someone requested we add a label at the end of the transformed
1689 block, do so. */
1690 if (this_tf.fallthru_label)
1692 /* This must be reached only if ndests == 0. */
1693 gimple x = gimple_build_label (this_tf.fallthru_label);
1694 gimple_seq_add_stmt (&this_tf.top_p_seq, x);
1697 VEC_free (tree, heap, this_tf.dest_array);
1698 free (this_tf.goto_queue);
1699 if (this_tf.goto_queue_map)
1700 pointer_map_destroy (this_tf.goto_queue_map);
1702 /* If there was an old (aka outer) eh_seq, append the current eh_seq.
1703 If there was no old eh_seq, then the append is trivially already done. */
1704 if (old_eh_seq)
1706 if (eh_seq == NULL)
1707 eh_seq = old_eh_seq;
1708 else
1710 gimple_seq new_eh_seq = eh_seq;
1711 eh_seq = old_eh_seq;
1712 gimple_seq_add_seq (&eh_seq, new_eh_seq);
1716 return this_tf.top_p_seq;
1719 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a
1720 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
1721 exception region trees that record all the magic. */
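/* Output shape, roughly:

	<try body>
     over:				-- only if some handler falls through

   and, on eh_seq:
     <post landing pad>
	eh_dispatch <region>;
	resx;
     <label 1>: <handler 1>  goto over;
     <label 2>: <handler 2>  goto over;
	...  */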
1723 static gimple_seq
1724 lower_catch (struct leh_state *state, gimple tp)
1726 eh_region try_region = NULL;
1727 struct leh_state this_state = *state;
1728 gimple_stmt_iterator gsi;
1729 tree out_label;
1730 gimple_seq new_seq, cleanup;
1731 gimple x;
1732 location_t try_catch_loc = gimple_location (tp);
1734 if (flag_exceptions)
1736 try_region = gen_eh_region_try (state->cur_region);
1737 this_state.cur_region = try_region;
1740 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1742 if (!eh_region_may_contain_throw (try_region))
1743 return gimple_try_eval (tp);
1745 new_seq = NULL;
1746 emit_eh_dispatch (&new_seq, try_region);
1747 emit_resx (&new_seq, try_region);
1749 this_state.cur_region = state->cur_region;
1750 this_state.ehp_region = try_region;
1752 out_label = NULL;
1753 cleanup = gimple_try_cleanup (tp);
1754 for (gsi = gsi_start (cleanup);
1755 !gsi_end_p (gsi);
1756 gsi_next (&gsi))
1758 eh_catch c;
1759 gimple gcatch;
1760 gimple_seq handler;
1762 gcatch = gsi_stmt (gsi);
1763 c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));
1765 handler = gimple_catch_handler (gcatch);
1766 lower_eh_constructs_1 (&this_state, &handler);
1768 c->label = create_artificial_label (UNKNOWN_LOCATION);
1769 x = gimple_build_label (c->label);
1770 gimple_seq_add_stmt (&new_seq, x);
1772 gimple_seq_add_seq (&new_seq, handler);
1774 if (gimple_seq_may_fallthru (new_seq))
1776 if (!out_label)
1777 out_label = create_artificial_label (try_catch_loc);
1779 x = gimple_build_goto (out_label);
1780 gimple_seq_add_stmt (&new_seq, x);
1782 if (!c->type_list)
1783 break;
1786 gimple_try_set_cleanup (tp, new_seq);
1788 return frob_into_branch_around (tp, try_region, out_label);
1791 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a
1792 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
1793 region trees that record all the magic. */
1795 static gimple_seq
1796 lower_eh_filter (struct leh_state *state, gimple tp)
1798 struct leh_state this_state = *state;
1799 eh_region this_region = NULL;
1800 gimple inner, x;
1801 gimple_seq new_seq;
1803 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1805 if (flag_exceptions)
1807 this_region = gen_eh_region_allowed (state->cur_region,
1808 gimple_eh_filter_types (inner));
1809 this_state.cur_region = this_region;
1812 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1814 if (!eh_region_may_contain_throw (this_region))
1815 return gimple_try_eval (tp);
1817 new_seq = NULL;
1818 this_state.cur_region = state->cur_region;
1819 this_state.ehp_region = this_region;
1821 emit_eh_dispatch (&new_seq, this_region);
1822 emit_resx (&new_seq, this_region);
1824 this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
1825 x = gimple_build_label (this_region->u.allowed.label);
1826 gimple_seq_add_stmt (&new_seq, x);
1828 lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
1829 gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));
1831 gimple_try_set_cleanup (tp, new_seq);
1833 return frob_into_branch_around (tp, this_region, NULL);
1836 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with
1837 an GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
1838 plus the exception region trees that record all the magic. */
1840 static gimple_seq
1841 lower_eh_must_not_throw (struct leh_state *state, gimple tp)
1843 struct leh_state this_state = *state;
1845 if (flag_exceptions)
1847 gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1848 eh_region this_region;
1850 this_region = gen_eh_region_must_not_throw (state->cur_region);
1851 this_region->u.must_not_throw.failure_decl
1852 = gimple_eh_must_not_throw_fndecl (inner);
1853 this_region->u.must_not_throw.failure_loc = gimple_location (tp);
1855 /* In order to get mangling applied to this decl, we must mark it
1856 used now. Otherwise, pass_ipa_free_lang_data won't think it
1857 needs to happen. */
1858 TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
1860 this_state.cur_region = this_region;
1863 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1865 return gimple_try_eval (tp);
1868 /* Implement a cleanup expression. This is similar to try-finally,
1869 except that we only execute the cleanup block for exception edges. */
1871 static gimple_seq
1872 lower_cleanup (struct leh_state *state, gimple tp)
1874 struct leh_state this_state = *state;
1875 eh_region this_region = NULL;
1876 struct leh_tf_state fake_tf;
1877 gimple_seq result;
1878 bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
1880 if (flag_exceptions && !cleanup_dead)
1882 this_region = gen_eh_region_cleanup (state->cur_region);
1883 this_state.cur_region = this_region;
1886 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1888 if (cleanup_dead || !eh_region_may_contain_throw (this_region))
1889 return gimple_try_eval (tp);
1891 /* Build enough of a try-finally state so that we can reuse
1892 honor_protect_cleanup_actions. */
1893 memset (&fake_tf, 0, sizeof (fake_tf));
1894 fake_tf.top_p = fake_tf.try_finally_expr = tp;
1895 fake_tf.outer = state;
1896 fake_tf.region = this_region;
1897 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1898 fake_tf.may_throw = true;
1900 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1902 if (fake_tf.may_throw)
1904 /* In this case honor_protect_cleanup_actions had nothing to do,
1905 and we should process this normally. */
1906 lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
1907 result = frob_into_branch_around (tp, this_region,
1908 fake_tf.fallthru_label);
1910 else
1912 /* In this case honor_protect_cleanup_actions did nearly all of
1913 the work. All we have left is to append the fallthru_label. */
1915 result = gimple_try_eval (tp);
1916 if (fake_tf.fallthru_label)
1918 gimple x = gimple_build_label (fake_tf.fallthru_label);
1919 gimple_seq_add_stmt (&result, x);
1922 return result;
1925 /* Main loop for lowering eh constructs. Also moves gsi to the next
1926 statement. */
1928 static void
1929 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1931 gimple_seq replace;
1932 gimple x;
1933 gimple stmt = gsi_stmt (*gsi);
1935 switch (gimple_code (stmt))
1937 case GIMPLE_CALL:
1939 tree fndecl = gimple_call_fndecl (stmt);
1940 tree rhs, lhs;
1942 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1943 switch (DECL_FUNCTION_CODE (fndecl))
1945 case BUILT_IN_EH_POINTER:
1946 /* The front end may have generated a call to
1947 __builtin_eh_pointer (0) within a catch region. Replace
1948 this zero argument with the current catch region number. */
1949 if (state->ehp_region)
1951 tree nr = build_int_cst (integer_type_node,
1952 state->ehp_region->index);
1953 gimple_call_set_arg (stmt, 0, nr);
1955 else
1957 /* The user has done something silly. Remove it. */
1958 rhs = null_pointer_node;
1959 goto do_replace;
1961 break;
1963 case BUILT_IN_EH_FILTER:
1964 /* ??? This should never appear, but since it's a builtin it
1965 is accessible to abuse by users. Just remove it and
1966 replace the use with the arbitrary value zero. */
1967 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
1968 do_replace:
1969 lhs = gimple_call_lhs (stmt);
1970 x = gimple_build_assign (lhs, rhs);
1971 gsi_insert_before (gsi, x, GSI_SAME_STMT);
1972 /* FALLTHRU */
1974 case BUILT_IN_EH_COPY_VALUES:
1975 /* Likewise this should not appear. Remove it. */
1976 gsi_remove (gsi, true);
1977 return;
1979 default:
1980 break;
1983 /* FALLTHRU */
1985 case GIMPLE_ASSIGN:
1986 /* If the stmt can throw use a new temporary for the assignment
1987 to a LHS. This makes sure the old value of the LHS is
1988 available on the EH edge. Only do so for statements that
1989 potentially fall through (e.g. no noreturn calls), otherwise
1990 this new assignment might create fake fallthru regions. */
1991 if (stmt_could_throw_p (stmt)
1992 && gimple_has_lhs (stmt)
1993 && gimple_stmt_may_fallthru (stmt)
1994 && !tree_could_throw_p (gimple_get_lhs (stmt))
1995 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
1997 tree lhs = gimple_get_lhs (stmt);
1998 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
1999 gimple s = gimple_build_assign (lhs, tmp);
2000 gimple_set_location (s, gimple_location (stmt));
2001 gimple_set_block (s, gimple_block (stmt));
2002 gimple_set_lhs (stmt, tmp);
2003 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
2004 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
2005 DECL_GIMPLE_REG_P (tmp) = 1;
2006 gsi_insert_after (gsi, s, GSI_SAME_STMT);
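/* As an illustrative sketch (the names below are invented, not taken
   from this pass): a throwing statement
       x = y / z;
   is rewritten into
       tmp = y / z;
       x = tmp;
   so that the EH edge leaving the division still sees the previous
   value of x.  */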
2008 /* Look for things that can throw exceptions, and record them. */
2009 if (state->cur_region && stmt_could_throw_p (stmt))
2011 record_stmt_eh_region (state->cur_region, stmt);
2012 note_eh_region_may_contain_throw (state->cur_region);
2014 break;
2016 case GIMPLE_COND:
2017 case GIMPLE_GOTO:
2018 case GIMPLE_RETURN:
2019 maybe_record_in_goto_queue (state, stmt);
2020 break;
2022 case GIMPLE_SWITCH:
2023 verify_norecord_switch_expr (state, stmt);
2024 break;
2026 case GIMPLE_TRY:
2027 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
2028 replace = lower_try_finally (state, stmt);
2029 else
2031 x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
2032 if (!x)
2034 replace = gimple_try_eval (stmt);
2035 lower_eh_constructs_1 (state, &replace);
2037 else
2038 switch (gimple_code (x))
2040 case GIMPLE_CATCH:
2041 replace = lower_catch (state, stmt);
2042 break;
2043 case GIMPLE_EH_FILTER:
2044 replace = lower_eh_filter (state, stmt);
2045 break;
2046 case GIMPLE_EH_MUST_NOT_THROW:
2047 replace = lower_eh_must_not_throw (state, stmt);
2048 break;
2049 case GIMPLE_EH_ELSE:
2050 /* This code is only valid with GIMPLE_TRY_FINALLY. */
2051 gcc_unreachable ();
2052 default:
2053 replace = lower_cleanup (state, stmt);
2054 break;
2058 /* Remove the old stmt and insert the transformed sequence
2059 instead. */
2060 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
2061 gsi_remove (gsi, true);
2063 /* Return since we don't want gsi_next () */
2064 return;
2066 case GIMPLE_EH_ELSE:
2067 /* We should be eliminating this in lower_try_finally et al. */
2068 gcc_unreachable ();
2070 default:
2071 /* A type, a decl, or some kind of statement that we're not
2072 interested in. Don't walk them. */
2073 break;
2076 gsi_next (gsi);
2079 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
2081 static void
2082 lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
2084 gimple_stmt_iterator gsi;
2085 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
2086 lower_eh_constructs_2 (state, &gsi);
2089 static unsigned int
2090 lower_eh_constructs (void)
2092 struct leh_state null_state;
2093 gimple_seq bodyp;
2095 bodyp = gimple_body (current_function_decl);
2096 if (bodyp == NULL)
2097 return 0;
2099 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
2100 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
2101 memset (&null_state, 0, sizeof (null_state));
2103 collect_finally_tree_1 (bodyp, NULL);
2104 lower_eh_constructs_1 (&null_state, &bodyp);
2105 gimple_set_body (current_function_decl, bodyp);
2107 /* We assume there's a return statement, or something, at the end of
2108 the function, and thus plopping the EH sequence afterward won't
2109 change anything. */
2110 gcc_assert (!gimple_seq_may_fallthru (bodyp));
2111 gimple_seq_add_seq (&bodyp, eh_seq);
2113 /* We assume that since BODYP already existed, adding EH_SEQ to it
2114 didn't change its value, and we don't have to re-set the function. */
2115 gcc_assert (bodyp == gimple_body (current_function_decl));
2117 htab_delete (finally_tree);
2118 BITMAP_FREE (eh_region_may_contain_throw_map);
2119 eh_seq = NULL;
2121 /* If this function needs a language specific EH personality routine
2122 and the frontend didn't already set one do so now. */
2123 if (function_needs_eh_personality (cfun) == eh_personality_lang
2124 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2125 DECL_FUNCTION_PERSONALITY (current_function_decl)
2126 = lang_hooks.eh_personality ();
2128 return 0;
2131 struct gimple_opt_pass pass_lower_eh =
2134 GIMPLE_PASS,
2135 "eh", /* name */
2136 NULL, /* gate */
2137 lower_eh_constructs, /* execute */
2138 NULL, /* sub */
2139 NULL, /* next */
2140 0, /* static_pass_number */
2141 TV_TREE_EH, /* tv_id */
2142 PROP_gimple_lcf, /* properties_required */
2143 PROP_gimple_leh, /* properties_provided */
2144 0, /* properties_destroyed */
2145 0, /* todo_flags_start */
2146 0 /* todo_flags_finish */
2150 /* Create the multiple edges from an EH_DISPATCH statement to all of
2151 the possible handlers for its EH region. Return true if there's
2152 no fallthru edge; false if there is. */
2154 bool
2155 make_eh_dispatch_edges (gimple stmt)
2157 eh_region r;
2158 eh_catch c;
2159 basic_block src, dst;
2161 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2162 src = gimple_bb (stmt);
2164 switch (r->type)
2166 case ERT_TRY:
2167 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2169 dst = label_to_block (c->label);
2170 make_edge (src, dst, 0);
2172 /* A catch-all handler doesn't have a fallthru. */
2173 if (c->type_list == NULL)
2174 return false;
2176 break;
2178 case ERT_ALLOWED_EXCEPTIONS:
2179 dst = label_to_block (r->u.allowed.label);
2180 make_edge (src, dst, 0);
2181 break;
2183 default:
2184 gcc_unreachable ();
2187 return true;
2190 /* Create the single EH edge from STMT to its nearest landing pad,
2191 if there is such a landing pad within the current function. */
2193 void
2194 make_eh_edges (gimple stmt)
2196 basic_block src, dst;
2197 eh_landing_pad lp;
2198 int lp_nr;
2200 lp_nr = lookup_stmt_eh_lp (stmt);
2201 if (lp_nr <= 0)
2202 return;
2204 lp = get_eh_landing_pad_from_number (lp_nr);
2205 gcc_assert (lp != NULL);
2207 src = gimple_bb (stmt);
2208 dst = label_to_block (lp->post_landing_pad);
2209 make_edge (src, dst, EDGE_EH);
2212 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2213 do not actually perform the final edge redirection.
2215 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2216 we intend to change the destination EH region as well; this means
2217 EH_LANDING_PAD_NR must already be set on the destination block label.
2218 If false, we're being called from generic cfg manipulation code and we
2219 should preserve our place within the region tree. */
2221 static void
2222 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2224 eh_landing_pad old_lp, new_lp;
2225 basic_block old_bb;
2226 gimple throw_stmt;
2227 int old_lp_nr, new_lp_nr;
2228 tree old_label, new_label;
2229 edge_iterator ei;
2230 edge e;
2232 old_bb = edge_in->dest;
2233 old_label = gimple_block_label (old_bb);
2234 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2235 gcc_assert (old_lp_nr > 0);
2236 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2238 throw_stmt = last_stmt (edge_in->src);
2239 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2241 new_label = gimple_block_label (new_bb);
2243 /* Look for an existing region that might be using NEW_BB already. */
2244 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2245 if (new_lp_nr)
2247 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2248 gcc_assert (new_lp);
2250 /* Unless CHANGE_REGION is true, the new and old landing pad
2251 had better be associated with the same EH region. */
2252 gcc_assert (change_region || new_lp->region == old_lp->region);
2254 else
2256 new_lp = NULL;
2257 gcc_assert (!change_region);
2260 /* Notice when we redirect the last EH edge away from OLD_BB. */
2261 FOR_EACH_EDGE (e, ei, old_bb->preds)
2262 if (e != edge_in && (e->flags & EDGE_EH))
2263 break;
2265 if (new_lp)
2267 /* NEW_LP already exists. If there are still edges into OLD_LP,
2268 there's nothing to do with the EH tree. If there are no more
2269 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2270 If CHANGE_REGION is true, then our caller is expecting to remove
2271 the landing pad. */
2272 if (e == NULL && !change_region)
2273 remove_eh_landing_pad (old_lp);
2275 else
2277 /* No correct landing pad exists. If there are no more edges
2278 into OLD_LP, then we can simply re-use the existing landing pad.
2279 Otherwise, we have to create a new landing pad. */
2280 if (e == NULL)
2282 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2283 new_lp = old_lp;
2285 else
2286 new_lp = gen_eh_landing_pad (old_lp->region);
2287 new_lp->post_landing_pad = new_label;
2288 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2291 /* Maybe move the throwing statement to the new region. */
2292 if (old_lp != new_lp)
2294 remove_stmt_from_eh_lp (throw_stmt);
2295 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2299 /* Redirect EH edge E to NEW_BB. */
2301 edge
2302 redirect_eh_edge (edge edge_in, basic_block new_bb)
2304 redirect_eh_edge_1 (edge_in, new_bb, false);
2305 return ssa_redirect_edge (edge_in, new_bb);
2308 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2309 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2310 The actual edge update will happen in the caller. */
2312 void
2313 redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
2315 tree new_lab = gimple_block_label (new_bb);
2316 bool any_changed = false;
2317 basic_block old_bb;
2318 eh_region r;
2319 eh_catch c;
2321 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2322 switch (r->type)
2324 case ERT_TRY:
2325 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2327 old_bb = label_to_block (c->label);
2328 if (old_bb == e->dest)
2330 c->label = new_lab;
2331 any_changed = true;
2334 break;
2336 case ERT_ALLOWED_EXCEPTIONS:
2337 old_bb = label_to_block (r->u.allowed.label);
2338 gcc_assert (old_bb == e->dest);
2339 r->u.allowed.label = new_lab;
2340 any_changed = true;
2341 break;
2343 default:
2344 gcc_unreachable ();
2347 gcc_assert (any_changed);
2350 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2352 bool
2353 operation_could_trap_helper_p (enum tree_code op,
2354 bool fp_operation,
2355 bool honor_trapv,
2356 bool honor_nans,
2357 bool honor_snans,
2358 tree divisor,
2359 bool *handled)
2361 *handled = true;
2362 switch (op)
2364 case TRUNC_DIV_EXPR:
2365 case CEIL_DIV_EXPR:
2366 case FLOOR_DIV_EXPR:
2367 case ROUND_DIV_EXPR:
2368 case EXACT_DIV_EXPR:
2369 case CEIL_MOD_EXPR:
2370 case FLOOR_MOD_EXPR:
2371 case ROUND_MOD_EXPR:
2372 case TRUNC_MOD_EXPR:
2373 case RDIV_EXPR:
2374 if (honor_snans || honor_trapv)
2375 return true;
2376 if (fp_operation)
2377 return flag_trapping_math;
2378 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2379 return true;
2380 return false;
2382 case LT_EXPR:
2383 case LE_EXPR:
2384 case GT_EXPR:
2385 case GE_EXPR:
2386 case LTGT_EXPR:
2387 /* Some floating point comparisons may trap. */
2388 return honor_nans;
2390 case EQ_EXPR:
2391 case NE_EXPR:
2392 case UNORDERED_EXPR:
2393 case ORDERED_EXPR:
2394 case UNLT_EXPR:
2395 case UNLE_EXPR:
2396 case UNGT_EXPR:
2397 case UNGE_EXPR:
2398 case UNEQ_EXPR:
2399 return honor_snans;
2401 case CONVERT_EXPR:
2402 case FIX_TRUNC_EXPR:
2403 /* Conversion of floating point might trap. */
2404 return honor_nans;
2406 case NEGATE_EXPR:
2407 case ABS_EXPR:
2408 case CONJ_EXPR:
2409 /* These operations don't trap with floating point. */
2410 if (honor_trapv)
2411 return true;
2412 return false;
2414 case PLUS_EXPR:
2415 case MINUS_EXPR:
2416 case MULT_EXPR:
2417 /* Any floating arithmetic may trap. */
2418 if (fp_operation && flag_trapping_math)
2419 return true;
2420 if (honor_trapv)
2421 return true;
2422 return false;
2424 case COMPLEX_EXPR:
2425 case CONSTRUCTOR:
2426 /* Constructing an object cannot trap. */
2427 return false;
2429 default:
2430 /* Any floating arithmetic may trap. */
2431 if (fp_operation && flag_trapping_math)
2432 return true;
2434 *handled = false;
2435 return false;
2439 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2440 on floating-point values. HONOR_TRAPV is true if OP is applied on integer
2441 type operands that may trap. If OP is a division operator, DIVISOR contains
2442 the value of the divisor. */
2444 bool
2445 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2446 tree divisor)
2448 bool honor_nans = (fp_operation && flag_trapping_math
2449 && !flag_finite_math_only);
2450 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2451 bool handled;
2453 if (TREE_CODE_CLASS (op) != tcc_comparison
2454 && TREE_CODE_CLASS (op) != tcc_unary
2455 && TREE_CODE_CLASS (op) != tcc_binary)
2456 return false;
2458 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2459 honor_nans, honor_snans, divisor,
2460 &handled);
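/* A minimal usage sketch (not an actual call site in this file):
     operation_could_trap_p (TRUNC_DIV_EXPR, false, false, integer_zero_node)
   returns true, because integer division by a divisor that is zero, or
   merely not known to be a nonzero constant, may trap.  */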
2463 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2464 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
2465 This routine expects only GIMPLE lhs or rhs input. */
2467 bool
2468 tree_could_trap_p (tree expr)
2470 enum tree_code code;
2471 bool fp_operation = false;
2472 bool honor_trapv = false;
2473 tree t, base, div = NULL_TREE;
2475 if (!expr)
2476 return false;
2478 code = TREE_CODE (expr);
2479 t = TREE_TYPE (expr);
2481 if (t)
2483 if (COMPARISON_CLASS_P (expr))
2484 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2485 else
2486 fp_operation = FLOAT_TYPE_P (t);
2487 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2490 if (TREE_CODE_CLASS (code) == tcc_binary)
2491 div = TREE_OPERAND (expr, 1);
2492 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2493 return true;
2495 restart:
2496 switch (code)
2498 case TARGET_MEM_REF:
2499 if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
2500 && !TMR_INDEX (expr) && !TMR_INDEX2 (expr))
2501 return false;
2502 return !TREE_THIS_NOTRAP (expr);
2504 case COMPONENT_REF:
2505 case REALPART_EXPR:
2506 case IMAGPART_EXPR:
2507 case BIT_FIELD_REF:
2508 case VIEW_CONVERT_EXPR:
2509 case WITH_SIZE_EXPR:
2510 expr = TREE_OPERAND (expr, 0);
2511 code = TREE_CODE (expr);
2512 goto restart;
2514 case ARRAY_RANGE_REF:
2515 base = TREE_OPERAND (expr, 0);
2516 if (tree_could_trap_p (base))
2517 return true;
2518 if (TREE_THIS_NOTRAP (expr))
2519 return false;
2520 return !range_in_array_bounds_p (expr);
2522 case ARRAY_REF:
2523 base = TREE_OPERAND (expr, 0);
2524 if (tree_could_trap_p (base))
2525 return true;
2526 if (TREE_THIS_NOTRAP (expr))
2527 return false;
2528 return !in_array_bounds_p (expr);
2530 case MEM_REF:
2531 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2532 return false;
2533 /* Fallthru. */
2534 case INDIRECT_REF:
2535 return !TREE_THIS_NOTRAP (expr);
2537 case ASM_EXPR:
2538 return TREE_THIS_VOLATILE (expr);
2540 case CALL_EXPR:
2541 t = get_callee_fndecl (expr);
2542 /* Assume that calls to weak functions may trap. */
2543 if (!t || !DECL_P (t))
2544 return true;
2545 if (DECL_WEAK (t))
2546 return tree_could_trap_p (t);
2547 return false;
2549 case FUNCTION_DECL:
2550 /* Assume that accesses to weak functions may trap, unless we know
2551 they are certainly defined in current TU or in some other
2552 LTO partition. */
2553 if (DECL_WEAK (expr))
2555 struct cgraph_node *node;
2556 if (!DECL_EXTERNAL (expr))
2557 return false;
2558 node = cgraph_function_node (cgraph_get_node (expr), NULL);
2559 if (node && node->symbol.in_other_partition)
2560 return false;
2561 return true;
2563 return false;
2565 case VAR_DECL:
2566 /* Assume that accesses to weak vars may trap, unless we know
2567 they are certainly defined in current TU or in some other
2568 LTO partition. */
2569 if (DECL_WEAK (expr))
2571 struct varpool_node *node;
2572 if (!DECL_EXTERNAL (expr))
2573 return false;
2574 node = varpool_variable_node (varpool_get_node (expr), NULL);
2575 if (node && node->symbol.in_other_partition)
2576 return false;
2577 return true;
2579 return false;
2581 default:
2582 return false;
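/* For example (an illustrative sketch): tree_could_trap_p returns true
   for an INDIRECT_REF or MEM_REF through an arbitrary pointer unless
   TREE_THIS_NOTRAP is set, while a COMPONENT_REF such as s.f of a plain
   automatic variable strips down to the VAR_DECL and returns false.  */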
2587 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2588 an assignment or a conditional) may throw. */
2590 static bool
2591 stmt_could_throw_1_p (gimple stmt)
2593 enum tree_code code = gimple_expr_code (stmt);
2594 bool honor_nans = false;
2595 bool honor_snans = false;
2596 bool fp_operation = false;
2597 bool honor_trapv = false;
2598 tree t;
2599 size_t i;
2600 bool handled, ret;
2602 if (TREE_CODE_CLASS (code) == tcc_comparison
2603 || TREE_CODE_CLASS (code) == tcc_unary
2604 || TREE_CODE_CLASS (code) == tcc_binary)
2606 if (is_gimple_assign (stmt)
2607 && TREE_CODE_CLASS (code) == tcc_comparison)
2608 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2609 else if (gimple_code (stmt) == GIMPLE_COND)
2610 t = TREE_TYPE (gimple_cond_lhs (stmt));
2611 else
2612 t = gimple_expr_type (stmt);
2613 fp_operation = FLOAT_TYPE_P (t);
2614 if (fp_operation)
2616 honor_nans = flag_trapping_math && !flag_finite_math_only;
2617 honor_snans = flag_signaling_nans != 0;
2619 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2620 honor_trapv = true;
2623 /* Check if the main expression may trap. */
2624 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2625 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2626 honor_nans, honor_snans, t,
2627 &handled);
2628 if (handled)
2629 return ret;
2631 /* If the expression does not trap, see if any of the individual operands may
2632 trap. */
2633 for (i = 0; i < gimple_num_ops (stmt); i++)
2634 if (tree_could_trap_p (gimple_op (stmt, i)))
2635 return true;
2637 return false;
2641 /* Return true if statement STMT could throw an exception. */
2643 bool
2644 stmt_could_throw_p (gimple stmt)
2646 if (!flag_exceptions)
2647 return false;
2649 /* The only statements that can throw an exception are assignments,
2650 conditionals, calls, resx, and asms. */
2651 switch (gimple_code (stmt))
2653 case GIMPLE_RESX:
2654 return true;
2656 case GIMPLE_CALL:
2657 return !gimple_call_nothrow_p (stmt);
2659 case GIMPLE_ASSIGN:
2660 case GIMPLE_COND:
2661 if (!cfun->can_throw_non_call_exceptions)
2662 return false;
2663 return stmt_could_throw_1_p (stmt);
2665 case GIMPLE_ASM:
2666 if (!cfun->can_throw_non_call_exceptions)
2667 return false;
2668 return gimple_asm_volatile_p (stmt);
2670 default:
2671 return false;
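/* As an illustrative summary: with plain -fexceptions only GIMPLE_RESX
   and calls that are not nothrow are treated as throwing; assignments,
   conditions and volatile asms additionally require
   cfun->can_throw_non_call_exceptions (e.g. -fnon-call-exceptions).  */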
2676 /* Return true if expression T could throw an exception. */
2678 bool
2679 tree_could_throw_p (tree t)
2681 if (!flag_exceptions)
2682 return false;
2683 if (TREE_CODE (t) == MODIFY_EXPR)
2685 if (cfun->can_throw_non_call_exceptions
2686 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2687 return true;
2688 t = TREE_OPERAND (t, 1);
2691 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2692 t = TREE_OPERAND (t, 0);
2693 if (TREE_CODE (t) == CALL_EXPR)
2694 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2695 if (cfun->can_throw_non_call_exceptions)
2696 return tree_could_trap_p (t);
2697 return false;
2700 /* Return true if STMT can throw an exception that is not caught within
2701 the current function (CFUN). */
2703 bool
2704 stmt_can_throw_external (gimple stmt)
2706 int lp_nr;
2708 if (!stmt_could_throw_p (stmt))
2709 return false;
2711 lp_nr = lookup_stmt_eh_lp (stmt);
2712 return lp_nr == 0;
2715 /* Return true if STMT can throw an exception that is caught within
2716 the current function (CFUN). */
2718 bool
2719 stmt_can_throw_internal (gimple stmt)
2721 int lp_nr;
2723 if (!stmt_could_throw_p (stmt))
2724 return false;
2726 lp_nr = lookup_stmt_eh_lp (stmt);
2727 return lp_nr > 0;
2730 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2731 remove any entry it might have from the EH table. Return true if
2732 any change was made. */
2734 bool
2735 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2737 if (stmt_could_throw_p (stmt))
2738 return false;
2739 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2742 /* Likewise, but always use the current function. */
2744 bool
2745 maybe_clean_eh_stmt (gimple stmt)
2747 return maybe_clean_eh_stmt_fn (cfun, stmt);
2750 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2751 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2752 in the table if it should be in there. Return TRUE if a replacement was
2753 done that may require an EH edge purge. */
2755 bool
2756 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2758 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2760 if (lp_nr != 0)
2762 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2764 if (new_stmt == old_stmt && new_stmt_could_throw)
2765 return false;
2767 remove_stmt_from_eh_lp (old_stmt);
2768 if (new_stmt_could_throw)
2770 add_stmt_to_eh_lp (new_stmt, lp_nr);
2771 return false;
2773 else
2774 return true;
2777 return false;
2780 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2781 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2782 operand is the return value of duplicate_eh_regions. */
2784 bool
2785 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2786 struct function *old_fun, gimple old_stmt,
2787 struct pointer_map_t *map, int default_lp_nr)
2789 int old_lp_nr, new_lp_nr;
2790 void **slot;
2792 if (!stmt_could_throw_p (new_stmt))
2793 return false;
2795 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2796 if (old_lp_nr == 0)
2798 if (default_lp_nr == 0)
2799 return false;
2800 new_lp_nr = default_lp_nr;
2802 else if (old_lp_nr > 0)
2804 eh_landing_pad old_lp, new_lp;
2806 old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr);
2807 slot = pointer_map_contains (map, old_lp);
2808 new_lp = (eh_landing_pad) *slot;
2809 new_lp_nr = new_lp->index;
2811 else
2813 eh_region old_r, new_r;
2815 old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr);
2816 slot = pointer_map_contains (map, old_r);
2817 new_r = (eh_region) *slot;
2818 new_lp_nr = -new_r->index;
2821 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2822 return true;
2825 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2826 and thus no remapping is required. */
2828 bool
2829 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2831 int lp_nr;
2833 if (!stmt_could_throw_p (new_stmt))
2834 return false;
2836 lp_nr = lookup_stmt_eh_lp (old_stmt);
2837 if (lp_nr == 0)
2838 return false;
2840 add_stmt_to_eh_lp (new_stmt, lp_nr);
2841 return true;
2844 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2845 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2846 this only handles handlers consisting of a single call, as that's the
2847 important case for C++: a destructor call for a particular object showing
2848 up in multiple handlers. */
2850 static bool
2851 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2853 gimple_stmt_iterator gsi;
2854 gimple ones, twos;
2855 unsigned int ai;
2857 gsi = gsi_start (oneh);
2858 if (!gsi_one_before_end_p (gsi))
2859 return false;
2860 ones = gsi_stmt (gsi);
2862 gsi = gsi_start (twoh);
2863 if (!gsi_one_before_end_p (gsi))
2864 return false;
2865 twos = gsi_stmt (gsi);
2867 if (!is_gimple_call (ones)
2868 || !is_gimple_call (twos)
2869 || gimple_call_lhs (ones)
2870 || gimple_call_lhs (twos)
2871 || gimple_call_chain (ones)
2872 || gimple_call_chain (twos)
2873 || !gimple_call_same_target_p (ones, twos)
2874 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2875 return false;
2877 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2878 if (!operand_equal_p (gimple_call_arg (ones, ai),
2879 gimple_call_arg (twos, ai), 0))
2880 return false;
2882 return true;
2885 /* Optimize
2886 try { A() } finally { try { ~B() } catch { ~A() } }
2887 try { ... } finally { ~A() }
2888 into
2889 try { A() } catch { ~B() }
2890 try { ~B() ... } finally { ~A() }
2892 This occurs frequently in C++, where A is a local variable and B is a
2893 temporary used in the initializer for A. */
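/* A rough C++ illustration (the class names are invented, not taken
   from any testcase):
     struct B { ~B (); };
     struct A { A (const B &); ~A (); };
     void f () { A a = A (B ()); }
   Gimplifying the declaration of `a' produces the nested try/finally
   shape shown above: the temporary B must be destroyed after A's
   constructor runs, and ~A must still run if ~B throws.  */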
2895 static void
2896 optimize_double_finally (gimple one, gimple two)
2898 gimple oneh;
2899 gimple_stmt_iterator gsi;
2900 gimple_seq cleanup;
2902 cleanup = gimple_try_cleanup (one);
2903 gsi = gsi_start (cleanup);
2904 if (!gsi_one_before_end_p (gsi))
2905 return;
2907 oneh = gsi_stmt (gsi);
2908 if (gimple_code (oneh) != GIMPLE_TRY
2909 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
2910 return;
2912 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2914 gimple_seq seq = gimple_try_eval (oneh);
2916 gimple_try_set_cleanup (one, seq);
2917 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
2918 seq = copy_gimple_seq_and_replace_locals (seq);
2919 gimple_seq_add_seq (&seq, gimple_try_eval (two));
2920 gimple_try_set_eval (two, seq);
2924 /* Perform EH refactoring optimizations that are simpler to do when code
2925 flow has been lowered but EH structures haven't. */
2927 static void
2928 refactor_eh_r (gimple_seq seq)
2930 gimple_stmt_iterator gsi;
2931 gimple one, two;
2933 one = NULL;
2934 two = NULL;
2935 gsi = gsi_start (seq);
2936 while (1)
2938 one = two;
2939 if (gsi_end_p (gsi))
2940 two = NULL;
2941 else
2942 two = gsi_stmt (gsi);
2943 if (one
2944 && two
2945 && gimple_code (one) == GIMPLE_TRY
2946 && gimple_code (two) == GIMPLE_TRY
2947 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2948 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
2949 optimize_double_finally (one, two);
2950 if (one)
2951 switch (gimple_code (one))
2953 case GIMPLE_TRY:
2954 refactor_eh_r (gimple_try_eval (one));
2955 refactor_eh_r (gimple_try_cleanup (one));
2956 break;
2957 case GIMPLE_CATCH:
2958 refactor_eh_r (gimple_catch_handler (one));
2959 break;
2960 case GIMPLE_EH_FILTER:
2961 refactor_eh_r (gimple_eh_filter_failure (one));
2962 break;
2963 case GIMPLE_EH_ELSE:
2964 refactor_eh_r (gimple_eh_else_n_body (one));
2965 refactor_eh_r (gimple_eh_else_e_body (one));
2966 break;
2967 default:
2968 break;
2970 if (two)
2971 gsi_next (&gsi);
2972 else
2973 break;
2977 static unsigned
2978 refactor_eh (void)
2980 refactor_eh_r (gimple_body (current_function_decl));
2981 return 0;
2984 static bool
2985 gate_refactor_eh (void)
2987 return flag_exceptions != 0;
2990 struct gimple_opt_pass pass_refactor_eh =
2993 GIMPLE_PASS,
2994 "ehopt", /* name */
2995 gate_refactor_eh, /* gate */
2996 refactor_eh, /* execute */
2997 NULL, /* sub */
2998 NULL, /* next */
2999 0, /* static_pass_number */
3000 TV_TREE_EH, /* tv_id */
3001 PROP_gimple_lcf, /* properties_required */
3002 0, /* properties_provided */
3003 0, /* properties_destroyed */
3004 0, /* todo_flags_start */
3005 0 /* todo_flags_finish */
3009 /* At the end of gimple optimization, we can lower RESX. */
3011 static bool
3012 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
3014 int lp_nr;
3015 eh_region src_r, dst_r;
3016 gimple_stmt_iterator gsi;
3017 gimple x;
3018 tree fn, src_nr;
3019 bool ret = false;
3021 lp_nr = lookup_stmt_eh_lp (stmt);
3022 if (lp_nr != 0)
3023 dst_r = get_eh_region_from_lp_number (lp_nr);
3024 else
3025 dst_r = NULL;
3027 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
3028 gsi = gsi_last_bb (bb);
3030 if (src_r == NULL)
3032 /* We can wind up with no source region when pass_cleanup_eh shows
3033 that there are no entries into an eh region and deletes it, but
3034 then the block that contains the resx isn't removed. This can
3035 happen without optimization when the switch statement created by
3036 lower_try_finally_switch isn't simplified to remove the eh case.
3038 Resolve this by expanding the resx node to an abort. */
3040 fn = builtin_decl_implicit (BUILT_IN_TRAP);
3041 x = gimple_build_call (fn, 0);
3042 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3044 while (EDGE_COUNT (bb->succs) > 0)
3045 remove_edge (EDGE_SUCC (bb, 0));
3047 else if (dst_r)
3049 /* When we have a destination region, we resolve this by copying
3050 the excptr and filter values into place, and changing the edge
3051 to immediately after the landing pad. */
3052 edge e;
3054 if (lp_nr < 0)
3056 basic_block new_bb;
3057 void **slot;
3058 tree lab;
3060 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
3061 the failure decl into a new block, if needed. */
3062 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
3064 slot = pointer_map_contains (mnt_map, dst_r);
3065 if (slot == NULL)
3067 gimple_stmt_iterator gsi2;
3069 new_bb = create_empty_bb (bb);
3070 if (current_loops)
3071 add_bb_to_loop (new_bb, bb->loop_father);
3072 lab = gimple_block_label (new_bb);
3073 gsi2 = gsi_start_bb (new_bb);
3075 fn = dst_r->u.must_not_throw.failure_decl;
3076 x = gimple_build_call (fn, 0);
3077 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3078 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3080 slot = pointer_map_insert (mnt_map, dst_r);
3081 *slot = lab;
3083 else
3085 lab = (tree) *slot;
3086 new_bb = label_to_block (lab);
3089 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3090 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
3091 e->count = bb->count;
3092 e->probability = REG_BR_PROB_BASE;
3094 else
3096 edge_iterator ei;
3097 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
3099 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
3100 src_nr = build_int_cst (integer_type_node, src_r->index);
3101 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3102 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3104 /* Update the flags for the outgoing edge. */
3105 e = single_succ_edge (bb);
3106 gcc_assert (e->flags & EDGE_EH);
3107 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3109 /* If there are no more EH users of the landing pad, delete it. */
3110 FOR_EACH_EDGE (e, ei, e->dest->preds)
3111 if (e->flags & EDGE_EH)
3112 break;
3113 if (e == NULL)
3115 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3116 remove_eh_landing_pad (lp);
3120 ret = true;
3122 else
3124 tree var;
3126 /* When we don't have a destination region, this exception escapes
3127 up the call chain. We resolve this by generating a call to the
3128 _Unwind_Resume library function. */
3130 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3131 with no arguments for C++ and Java. Check for that. */
3132 if (src_r->use_cxa_end_cleanup)
3134 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3135 x = gimple_build_call (fn, 0);
3136 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3138 else
3140 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3141 src_nr = build_int_cst (integer_type_node, src_r->index);
3142 x = gimple_build_call (fn, 1, src_nr);
3143 var = create_tmp_var (ptr_type_node, NULL);
3144 var = make_ssa_name (var, x);
3145 gimple_call_set_lhs (x, var);
3146 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3148 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3149 x = gimple_build_call (fn, 1, var);
3150 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3153 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3156 gsi_remove (&gsi, true);
3158 return ret;
3161 static unsigned
3162 execute_lower_resx (void)
3164 basic_block bb;
3165 struct pointer_map_t *mnt_map;
3166 bool dominance_invalidated = false;
3167 bool any_rewritten = false;
3169 mnt_map = pointer_map_create ();
3171 FOR_EACH_BB (bb)
3173 gimple last = last_stmt (bb);
3174 if (last && is_gimple_resx (last))
3176 dominance_invalidated |= lower_resx (bb, last, mnt_map);
3177 any_rewritten = true;
3181 pointer_map_destroy (mnt_map);
3183 if (dominance_invalidated)
3185 free_dominance_info (CDI_DOMINATORS);
3186 free_dominance_info (CDI_POST_DOMINATORS);
3189 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3192 static bool
3193 gate_lower_resx (void)
3195 return flag_exceptions != 0;
3198 struct gimple_opt_pass pass_lower_resx =
3201 GIMPLE_PASS,
3202 "resx", /* name */
3203 gate_lower_resx, /* gate */
3204 execute_lower_resx, /* execute */
3205 NULL, /* sub */
3206 NULL, /* next */
3207 0, /* static_pass_number */
3208 TV_TREE_EH, /* tv_id */
3209 PROP_gimple_lcf, /* properties_required */
3210 0, /* properties_provided */
3211 0, /* properties_destroyed */
3212 0, /* todo_flags_start */
3213 TODO_verify_flow /* todo_flags_finish */
3217 /* Try to optimize var = {v} {CLOBBER} stmts followed just by
3218 external throw. */
3220 static void
3221 optimize_clobbers (basic_block bb)
3223 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3224 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3226 gimple stmt = gsi_stmt (gsi);
3227 if (is_gimple_debug (stmt))
3228 continue;
3229 if (!gimple_clobber_p (stmt)
3230 || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3231 return;
3232 unlink_stmt_vdef (stmt);
3233 gsi_remove (&gsi, true);
3234 release_defs (stmt);
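/* E.g. (an illustrative sketch), in a block ending in a RESX that
   escapes the function,
     x = {v} {CLOBBER};
     resx 2
   the clobber carries no useful information past the throw and is
   simply deleted.  */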
3238 /* Try to sink var = {v} {CLOBBER} stmts followed just by
3239 internal throw to successor BB. */
3241 static int
3242 sink_clobbers (basic_block bb)
3244 edge e;
3245 edge_iterator ei;
3246 gimple_stmt_iterator gsi, dgsi;
3247 basic_block succbb;
3248 bool any_clobbers = false;
3250 /* Only optimize if BB has a single EH successor and
3251 all predecessor edges are EH too. */
3252 if (!single_succ_p (bb)
3253 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3254 return 0;
3256 FOR_EACH_EDGE (e, ei, bb->preds)
3258 if ((e->flags & EDGE_EH) == 0)
3259 return 0;
3262 /* And BB contains only CLOBBER stmts before the final
3263 RESX. */
3264 gsi = gsi_last_bb (bb);
3265 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3267 gimple stmt = gsi_stmt (gsi);
3268 if (is_gimple_debug (stmt))
3269 continue;
3270 if (gimple_code (stmt) == GIMPLE_LABEL)
3271 break;
3272 if (!gimple_clobber_p (stmt)
3273 || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3274 return 0;
3275 any_clobbers = true;
3277 if (!any_clobbers)
3278 return 0;
3280 succbb = single_succ (bb);
3281 dgsi = gsi_after_labels (succbb);
3282 gsi = gsi_last_bb (bb);
3283 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3285 gimple stmt = gsi_stmt (gsi);
3286 if (is_gimple_debug (stmt))
3287 continue;
3288 if (gimple_code (stmt) == GIMPLE_LABEL)
3289 break;
3290 unlink_stmt_vdef (stmt);
3291 gsi_remove (&gsi, false);
3292 /* Trigger the operand scanner to cause renaming for virtual
3293 operands for this statement.
3294 ??? Given the simple structure of this code manually
3295 figuring out the reaching definition should not be too hard. */
3296 if (gimple_vuse (stmt))
3297 gimple_set_vuse (stmt, NULL_TREE);
3298 gsi_insert_before (&dgsi, stmt, GSI_SAME_STMT);
3301 return TODO_update_ssa_only_virtuals;
3304 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3305 we have found some duplicate labels and removed some edges. */
3307 static bool
3308 lower_eh_dispatch (basic_block src, gimple stmt)
3310 gimple_stmt_iterator gsi;
3311 int region_nr;
3312 eh_region r;
3313 tree filter, fn;
3314 gimple x;
3315 bool redirected = false;
3317 region_nr = gimple_eh_dispatch_region (stmt);
3318 r = get_eh_region_from_number (region_nr);
3320 gsi = gsi_last_bb (src);
3322 switch (r->type)
3324 case ERT_TRY:
3326 VEC (tree, heap) *labels = NULL;
3327 tree default_label = NULL;
3328 eh_catch c;
3329 edge_iterator ei;
3330 edge e;
3331 struct pointer_set_t *seen_values = pointer_set_create ();
3333 /* Collect the labels for a switch. Zero the post_landing_pad
3334 field because we'll no longer have anything keeping these labels
3335 in existence and the optimizer will be free to merge these
3336 blocks at will. */
3337 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3339 tree tp_node, flt_node, lab = c->label;
3340 bool have_label = false;
3342 c->label = NULL;
3343 tp_node = c->type_list;
3344 flt_node = c->filter_list;
3346 if (tp_node == NULL)
3348 default_label = lab;
3349 break;
3353 /* Filter out duplicate labels that arise when this handler
3354 is shadowed by an earlier one. When no labels are
3355 attached to the handler anymore, we remove
3356 the corresponding edge and then we delete unreachable
3357 blocks at the end of this pass. */
3358 if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
3360 tree t = build_case_label (TREE_VALUE (flt_node),
3361 NULL, lab);
3362 VEC_safe_push (tree, heap, labels, t);
3363 pointer_set_insert (seen_values, TREE_VALUE (flt_node));
3364 have_label = true;
3367 tp_node = TREE_CHAIN (tp_node);
3368 flt_node = TREE_CHAIN (flt_node);
3370 while (tp_node);
3371 if (! have_label)
3373 remove_edge (find_edge (src, label_to_block (lab)));
3374 redirected = true;
3378 /* Clean up the edge flags. */
3379 FOR_EACH_EDGE (e, ei, src->succs)
3381 if (e->flags & EDGE_FALLTHRU)
3383 /* If there was no catch-all, use the fallthru edge. */
3384 if (default_label == NULL)
3385 default_label = gimple_block_label (e->dest);
3386 e->flags &= ~EDGE_FALLTHRU;
3389 gcc_assert (default_label != NULL);
3391 /* Don't generate a switch if there's only a default case.
3392 This is common in the form of try { A; } catch (...) { B; }. */
3393 if (labels == NULL)
3395 e = single_succ_edge (src);
3396 e->flags |= EDGE_FALLTHRU;
3398 else
3400 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3401 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3402 region_nr));
3403 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3404 filter = make_ssa_name (filter, x);
3405 gimple_call_set_lhs (x, filter);
3406 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3408 /* Turn the default label into a default case. */
3409 default_label = build_case_label (NULL, NULL, default_label);
3410 sort_case_labels (labels);
3412 x = gimple_build_switch (filter, default_label, labels);
3413 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3415 VEC_free (tree, heap, labels);
3417 pointer_set_destroy (seen_values);
3419 break;
3421 case ERT_ALLOWED_EXCEPTIONS:
3423 edge b_e = BRANCH_EDGE (src);
3424 edge f_e = FALLTHRU_EDGE (src);
3426 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3427 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3428 region_nr));
3429 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3430 filter = make_ssa_name (filter, x);
3431 gimple_call_set_lhs (x, filter);
3432 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3434 r->u.allowed.label = NULL;
3435 x = gimple_build_cond (EQ_EXPR, filter,
3436 build_int_cst (TREE_TYPE (filter),
3437 r->u.allowed.filter),
3438 NULL_TREE, NULL_TREE);
3439 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3441 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3442 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3444 break;
3446 default:
3447 gcc_unreachable ();
3450 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3451 gsi_remove (&gsi, true);
3452 return redirected;
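/* A sketch of the result for the ERT_TRY case (region number, temporary
   and label names are invented): the EH_DISPATCH statement is replaced
   by
     filter.1 = __builtin_eh_filter (1);
     switch (filter.1) <default: catch_all_lab, case 1: catch_int_lab>
   or, when only a catch-all handler exists, by a plain fallthru edge
   with no switch at all.  */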
3455 static unsigned
3456 execute_lower_eh_dispatch (void)
3458 basic_block bb;
3459 int flags = 0;
3460 bool redirected = false;
3462 assign_filter_values ();
3464 FOR_EACH_BB (bb)
3466 gimple last = last_stmt (bb);
3467 if (last == NULL)
3468 continue;
3469 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
3471 redirected |= lower_eh_dispatch (bb, last);
3472 flags |= TODO_update_ssa_only_virtuals;
3474 else if (gimple_code (last) == GIMPLE_RESX)
3476 if (stmt_can_throw_external (last))
3477 optimize_clobbers (bb);
3478 else
3479 flags |= sink_clobbers (bb);
3483 if (redirected)
3484 delete_unreachable_blocks ();
3485 return flags;
3488 static bool
3489 gate_lower_eh_dispatch (void)
3491 return cfun->eh->region_tree != NULL;
3494 struct gimple_opt_pass pass_lower_eh_dispatch =
3497 GIMPLE_PASS,
3498 "ehdisp", /* name */
3499 gate_lower_eh_dispatch, /* gate */
3500 execute_lower_eh_dispatch, /* execute */
3501 NULL, /* sub */
3502 NULL, /* next */
3503 0, /* static_pass_number */
3504 TV_TREE_EH, /* tv_id */
3505 PROP_gimple_lcf, /* properties_required */
3506 0, /* properties_provided */
3507 0, /* properties_destroyed */
3508 0, /* todo_flags_start */
3509 TODO_verify_flow /* todo_flags_finish */
3513 /* Walk statements, see what regions are really referenced and remove
3514 those that are unused. */
3516 static void
3517 remove_unreachable_handlers (void)
3519 sbitmap r_reachable, lp_reachable;
3520 eh_region region;
3521 eh_landing_pad lp;
3522 basic_block bb;
3523 int lp_nr, r_nr;
3525 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3526 lp_reachable
3527 = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
3528 sbitmap_zero (r_reachable);
3529 sbitmap_zero (lp_reachable);
3531 FOR_EACH_BB (bb)
3533 gimple_stmt_iterator gsi;
3535 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3537 gimple stmt = gsi_stmt (gsi);
3538 lp_nr = lookup_stmt_eh_lp (stmt);
3540 /* Negative LP numbers are MUST_NOT_THROW regions which
3541 are not considered BB enders. */
3542 if (lp_nr < 0)
3543 SET_BIT (r_reachable, -lp_nr);
3545 /* Positive LP numbers are real landing pads, and are BB enders. */
3546 else if (lp_nr > 0)
3548 gcc_assert (gsi_one_before_end_p (gsi));
3549 region = get_eh_region_from_lp_number (lp_nr);
3550 SET_BIT (r_reachable, region->index);
3551 SET_BIT (lp_reachable, lp_nr);
3554 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3555 switch (gimple_code (stmt))
3557 case GIMPLE_RESX:
3558 SET_BIT (r_reachable, gimple_resx_region (stmt));
3559 break;
3560 case GIMPLE_EH_DISPATCH:
3561 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3562 break;
3563 default:
3564 break;
3569 if (dump_file)
3571 fprintf (dump_file, "Before removal of unreachable regions:\n");
3572 dump_eh_tree (dump_file, cfun);
3573 fprintf (dump_file, "Reachable regions: ");
3574 dump_sbitmap_file (dump_file, r_reachable);
3575 fprintf (dump_file, "Reachable landing pads: ");
3576 dump_sbitmap_file (dump_file, lp_reachable);
3579 for (r_nr = 1;
3580 VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
3581 if (region && !TEST_BIT (r_reachable, r_nr))
3583 if (dump_file)
3584 fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
3585 remove_eh_handler (region);
3588 for (lp_nr = 1;
3589 VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
3590 if (lp && !TEST_BIT (lp_reachable, lp_nr))
3592 if (dump_file)
3593 fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
3594 remove_eh_landing_pad (lp);
3597 if (dump_file)
3599 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3600 dump_eh_tree (dump_file, cfun);
3601 fprintf (dump_file, "\n\n");
3604 sbitmap_free (r_reachable);
3605 sbitmap_free (lp_reachable);
3607 #ifdef ENABLE_CHECKING
3608 verify_eh_tree (cfun);
3609 #endif
3612 /* Remove unreachable handlers if any landing pads have been removed after
3613 last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
3615 void
3616 maybe_remove_unreachable_handlers (void)
3618 eh_landing_pad lp;
3619 int i;
3621 if (cfun->eh == NULL)
3622 return;
3624 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3625 if (lp && lp->post_landing_pad)
3627 if (label_to_block (lp->post_landing_pad) == NULL)
3629 remove_unreachable_handlers ();
3630 return;
3635 /* Remove regions that do not have landing pads. This assumes
3636 that remove_unreachable_handlers has already been run, and
3637 that we've just manipulated the landing pads since then. */
3639 static void
3640 remove_unreachable_handlers_no_lp (void)
3642 eh_region r;
3643 int i;
3644 sbitmap r_reachable;
3645 basic_block bb;
3647 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3648 sbitmap_zero (r_reachable);
3650 FOR_EACH_BB (bb)
3652 gimple stmt = last_stmt (bb);
3653 if (stmt)
3654 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3655 switch (gimple_code (stmt))
3657 case GIMPLE_RESX:
3658 SET_BIT (r_reachable, gimple_resx_region (stmt));
3659 break;
3660 case GIMPLE_EH_DISPATCH:
3661 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3662 break;
3663 default:
3664 break;
3668 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
3669 if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW
3670 && !TEST_BIT (r_reachable, i))
3672 if (dump_file)
3673 fprintf (dump_file, "Removing unreachable region %d\n", i);
3674 remove_eh_handler (r);
3677 sbitmap_free (r_reachable);
3680 /* Undo critical edge splitting on an EH landing pad. Earlier, we
3681 optimistically split all sorts of edges, including EH edges. The
3682 optimization passes in between may not have needed them; if not,
3683 we should undo the split.
3685 Recognize this case by having one EH edge incoming to the BB and
3686 one normal edge outgoing; BB should be empty apart from the
3687 post_landing_pad label.
3689 Note that this is slightly different from the empty handler case
3690 handled by cleanup_empty_eh, in that the actual handler may yet
3691 have actual code but the landing pad has been separated from the
3692 handler. As such, cleanup_empty_eh relies on this transformation
3693 having been done first. */
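/* Schematically (an illustrative sketch):
     before:  throw_bb --EH--> lp_bb --fallthru--> handler_bb
     after:   throw_bb --EH--> handler_bb
   where lp_bb holds nothing but the post_landing_pad label (and possibly
   debug stmts), so it simply becomes unreachable.  */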
3695 static bool
3696 unsplit_eh (eh_landing_pad lp)
3698 basic_block bb = label_to_block (lp->post_landing_pad);
3699 gimple_stmt_iterator gsi;
3700 edge e_in, e_out;
3702 /* Quickly check the edge counts on BB for singularity. */
3703 if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1)
3704 return false;
3705 e_in = EDGE_PRED (bb, 0);
3706 e_out = EDGE_SUCC (bb, 0);
3708 /* Input edge must be EH and output edge must be normal. */
3709 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3710 return false;
3712 /* The block must be empty except for the labels and debug insns. */
3713 gsi = gsi_after_labels (bb);
3714 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3715 gsi_next_nondebug (&gsi);
3716 if (!gsi_end_p (gsi))
3717 return false;
3719 /* The destination block must not already have a landing pad
3720 for a different region. */
3721 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3723 gimple stmt = gsi_stmt (gsi);
3724 tree lab;
3725 int lp_nr;
3727 if (gimple_code (stmt) != GIMPLE_LABEL)
3728 break;
3729 lab = gimple_label_label (stmt);
3730 lp_nr = EH_LANDING_PAD_NR (lab);
3731 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3732 return false;
3735 /* The new destination block must not already be a destination of
3736 the source block, lest we merge fallthru and eh edges and get
3737 all sorts of confusion. */
3738 if (find_edge (e_in->src, e_out->dest))
3739 return false;
3741 /* ??? We can get degenerate phis due to cfg cleanups. I would have
3742 thought this should have been cleaned up by a phicprop pass, but
3743 that doesn't appear to handle virtuals. Propagate by hand. */
3744 if (!gimple_seq_empty_p (phi_nodes (bb)))
3746 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
3748 gimple use_stmt, phi = gsi_stmt (gsi);
3749 tree lhs = gimple_phi_result (phi);
3750 tree rhs = gimple_phi_arg_def (phi, 0);
3751 use_operand_p use_p;
3752 imm_use_iterator iter;
3754 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3756 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3757 SET_USE (use_p, rhs);
3760 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3761 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
3763 remove_phi_node (&gsi, true);
3767 if (dump_file && (dump_flags & TDF_DETAILS))
3768 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
3769 lp->index, e_out->dest->index);
3771 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
3772 a successor edge, humor it. But do the real CFG change with the
3773 predecessor of E_OUT in order to preserve the ordering of arguments
3774 to the PHI nodes in E_OUT->DEST. */
3775 redirect_eh_edge_1 (e_in, e_out->dest, false);
3776 redirect_edge_pred (e_out, e_in->src);
3777 e_out->flags = e_in->flags;
3778 e_out->probability = e_in->probability;
3779 e_out->count = e_in->count;
3780 remove_edge (e_in);
3782 return true;
3785 /* Examine each landing pad block and see if it matches unsplit_eh. */
3787 static bool
3788 unsplit_all_eh (void)
3790 bool changed = false;
3791 eh_landing_pad lp;
3792 int i;
3794 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3795 if (lp)
3796 changed |= unsplit_eh (lp);
3798 return changed;
3801 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
3802 to OLD_BB to NEW_BB; return true on success, false on failure.
3804 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
3805 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
3806 Virtual PHIs may be deleted and marked for renaming. */
3808 static bool
3809 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
3810 edge old_bb_out, bool change_region)
3812 gimple_stmt_iterator ngsi, ogsi;
3813 edge_iterator ei;
3814 edge e;
3815 bitmap rename_virts;
3816 bitmap ophi_handled;
3818 /* The destination block must not be a regular successor for any
3819 of the preds of the landing pad. Thus, avoid turning
3820 <..>
3821 | \ EH
3822 | <..>
3823 | /
3824 <..>
3825 into
3826 <..>
3827 | | EH
3828 <..>
3829 which CFG verification would choke on. See PR45172 and PR51089. */
3830 FOR_EACH_EDGE (e, ei, old_bb->preds)
3831 if (find_edge (e->src, new_bb))
3832 return false;
3834 FOR_EACH_EDGE (e, ei, old_bb->preds)
3835 redirect_edge_var_map_clear (e);
3837 ophi_handled = BITMAP_ALLOC (NULL);
3838 rename_virts = BITMAP_ALLOC (NULL);
3840 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
3841 for the edges we're going to move. */
3842 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
3844 gimple ophi, nphi = gsi_stmt (ngsi);
3845 tree nresult, nop;
3847 nresult = gimple_phi_result (nphi);
3848 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
3850 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
3851 the source ssa_name. */
3852 ophi = NULL;
3853 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3855 ophi = gsi_stmt (ogsi);
3856 if (gimple_phi_result (ophi) == nop)
3857 break;
3858 ophi = NULL;
3861 /* If we did find the corresponding PHI, copy those inputs. */
3862 if (ophi)
3864 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
3865 if (!has_single_use (nop))
3867 imm_use_iterator imm_iter;
3868 use_operand_p use_p;
3870 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
3872 if (!gimple_debug_bind_p (USE_STMT (use_p))
3873 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
3874 || gimple_bb (USE_STMT (use_p)) != new_bb))
3875 goto fail;
3878 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
3879 FOR_EACH_EDGE (e, ei, old_bb->preds)
3881 location_t oloc;
3882 tree oop;
3884 if ((e->flags & EDGE_EH) == 0)
3885 continue;
3886 oop = gimple_phi_arg_def (ophi, e->dest_idx);
3887 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
3888 redirect_edge_var_map_add (e, nresult, oop, oloc);
3891 /* If we didn't find the PHI, but it's a VOP, remember to rename
3892 it later, assuming all other tests succeed. */
3893 else if (virtual_operand_p (nresult))
3894 bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult));
3895 /* If we didn't find the PHI, and it's a real variable, we know
3896 from the fact that OLD_BB is tree_empty_eh_handler_p that the
3897 variable is unchanged from input to the block and we can simply
3898 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
3899 else
3901 location_t nloc
3902 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
3903 FOR_EACH_EDGE (e, ei, old_bb->preds)
3904 redirect_edge_var_map_add (e, nresult, nop, nloc);
3908 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
3909 we don't know what values from the other edges into NEW_BB to use. */
3910 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3912 gimple ophi = gsi_stmt (ogsi);
3913 tree oresult = gimple_phi_result (ophi);
3914 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
3915 goto fail;
3918 /* At this point we know that the merge will succeed. Remove the PHI
3919 nodes for the virtuals that we want to rename. */
3920 if (!bitmap_empty_p (rename_virts))
3922 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); )
3924 gimple nphi = gsi_stmt (ngsi);
3925 tree nresult = gimple_phi_result (nphi);
3926 if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult)))
3928 mark_virtual_phi_result_for_renaming (nphi);
3929 remove_phi_node (&ngsi, true);
3931 else
3932 gsi_next (&ngsi);
3936 /* Finally, move the edges and update the PHIs. */
3937 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
3938 if (e->flags & EDGE_EH)
3940 /* ??? CFG manipulation routines do not try to update loop
3941 form on edge redirection. Do so manually here for now. */
3942 /* If we redirect a loop entry or latch edge, we will either create
3943 a multiple-entry loop or rotate the loop. If the loops merge,
3944 we may have created a loop with multiple latches.
3945 None of this is easily fixed, so cancel the affected loop
3946 and mark the other loop as possibly having multiple latches. */
3947 if (current_loops
3948 && e->dest == e->dest->loop_father->header)
3950 e->dest->loop_father->header = NULL;
3951 e->dest->loop_father->latch = NULL;
3952 new_bb->loop_father->latch = NULL;
3953 loops_state_set (LOOPS_NEED_FIXUP|LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
3955 redirect_eh_edge_1 (e, new_bb, change_region);
3956 redirect_edge_succ (e, new_bb);
3957 flush_pending_stmts (e);
3959 else
3960 ei_next (&ei);
3962 BITMAP_FREE (ophi_handled);
3963 BITMAP_FREE (rename_virts);
3964 return true;
3966 fail:
3967 FOR_EACH_EDGE (e, ei, old_bb->preds)
3968 redirect_edge_var_map_clear (e);
3969 BITMAP_FREE (ophi_handled);
3970 BITMAP_FREE (rename_virts);
3971 return false;
3974 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
3975 old region to NEW_REGION at BB. */
3977 static void
3978 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
3979 eh_landing_pad lp, eh_region new_region)
3981 gimple_stmt_iterator gsi;
3982 eh_landing_pad *pp;
3984 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
3985 continue;
3986 *pp = lp->next_lp;
3988 lp->region = new_region;
3989 lp->next_lp = new_region->landing_pads;
3990 new_region->landing_pads = lp;
3992 /* Delete the RESX that was matched within the empty handler block. */
3993 gsi = gsi_last_bb (bb);
3994 unlink_stmt_vdef (gsi_stmt (gsi));
3995 gsi_remove (&gsi, true);
3997 /* Clean up E_OUT for the fallthru. */
3998 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3999 e_out->probability = REG_BR_PROB_BASE;
4002 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
4003 unsplitting than unsplit_eh was prepared to handle, e.g. when
4004 multiple incoming edges and phis are involved. */
4006 static bool
4007 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
4009 gimple_stmt_iterator gsi;
4010 tree lab;
4012 /* We really ought not have totally lost everything following
4013 a landing pad label. Given that BB is empty, there had better
4014 be a successor. */
4015 gcc_assert (e_out != NULL);
4017 /* The destination block must not already have a landing pad
4018 for a different region. */
4019 lab = NULL;
4020 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4022 gimple stmt = gsi_stmt (gsi);
4023 int lp_nr;
4025 if (gimple_code (stmt) != GIMPLE_LABEL)
4026 break;
4027 lab = gimple_label_label (stmt);
4028 lp_nr = EH_LANDING_PAD_NR (lab);
4029 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4030 return false;
4033 /* Attempt to move the PHIs into the successor block. */
4034 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
4036 if (dump_file && (dump_flags & TDF_DETAILS))
4037 fprintf (dump_file,
4038 "Unsplit EH landing pad %d to block %i "
4039 "(via cleanup_empty_eh).\n",
4040 lp->index, e_out->dest->index);
4041 return true;
4044 return false;
4047 /* Return true if edge E_FIRST is part of an empty infinite loop
4048 or leads to such a loop through a series of single successor
4049 empty bbs. */
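/* The walk below marks visited blocks through their AUX field, follows
   only empty single-successor blocks, stops when it reaches a block with
   real (non-label, non-debug) statements or without exactly one successor,
   and reports a loop when a marked block is revisited.  */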
4051 static bool
4052 infinite_empty_loop_p (edge e_first)
4054 bool inf_loop = false;
4055 edge e;
4057 if (e_first->dest == e_first->src)
4058 return true;
4060 e_first->src->aux = (void *) 1;
4061 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4063 gimple_stmt_iterator gsi;
4064 if (e->dest->aux)
4066 inf_loop = true;
4067 break;
4069 e->dest->aux = (void *) 1;
4070 gsi = gsi_after_labels (e->dest);
4071 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4072 gsi_next_nondebug (&gsi);
4073 if (!gsi_end_p (gsi))
4074 break;
4076 e_first->src->aux = NULL;
4077 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4078 e->dest->aux = NULL;
4080 return inf_loop;
4083 /* Examine the block associated with LP to determine if it's an empty
4084 handler for its EH region. If so, attempt to redirect EH edges to
4085 an outer region. Return true if the CFG was updated in any way. This
4086 is similar to jump forwarding, just across EH edges. */
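/* Roughly: if the handler is nothing but "<LP>: resx N;", every statement
   that throws to LP can instead throw directly to whatever the RESX
   transfers to (or simply unwind out of the function, or trap via the
   runtime for a MUST_NOT_THROW region), leaving the handler block
   unreachable.  */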
4088 static bool
4089 cleanup_empty_eh (eh_landing_pad lp)
4091 basic_block bb = label_to_block (lp->post_landing_pad);
4092 gimple_stmt_iterator gsi;
4093 gimple resx;
4094 eh_region new_region;
4095 edge_iterator ei;
4096 edge e, e_out;
4097 bool has_non_eh_pred;
4098 bool ret = false;
4099 int new_lp_nr;
4101 /* There can be zero or one edges out of BB. This is the quickest test. */
4102 switch (EDGE_COUNT (bb->succs))
4104 case 0:
4105 e_out = NULL;
4106 break;
4107 case 1:
4108 e_out = EDGE_SUCC (bb, 0);
4109 break;
4110 default:
4111 return false;
4114 resx = last_stmt (bb);
4115 if (resx && is_gimple_resx (resx))
4117 if (stmt_can_throw_external (resx))
4118 optimize_clobbers (bb);
4119 else if (sink_clobbers (bb))
4120 ret = true;
4123 gsi = gsi_after_labels (bb);
4125 /* Make sure to skip debug statements. */
4126 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4127 gsi_next_nondebug (&gsi);
4129 /* If the block is totally empty, look for more unsplitting cases. */
4130 if (gsi_end_p (gsi))
4132 /* For the degenerate case of an infinite loop, bail out. */
4133 if (infinite_empty_loop_p (e_out))
4134 return ret;
4136 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
4139 /* The block should consist only of a single RESX statement, modulo a
4140 preceding call to __builtin_stack_restore if there is no outgoing
4141 edge, since the call can be eliminated in this case. */
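/* For instance, with an outgoing edge the handler is expected to be just
     <LP>: resx N;
   while with no outgoing edge it may additionally begin with a call along
   the lines of
     __builtin_stack_restore (sp_save_);
   (the operand name is only illustrative) before the RESX.  */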
4142 resx = gsi_stmt (gsi);
4143 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4145 gsi_next (&gsi);
4146 resx = gsi_stmt (gsi);
4148 if (!is_gimple_resx (resx))
4149 return ret;
4150 gcc_assert (gsi_one_before_end_p (gsi));
4152 /* Determine if there are non-EH edges, or resx edges into the handler. */
4153 has_non_eh_pred = false;
4154 FOR_EACH_EDGE (e, ei, bb->preds)
4155 if (!(e->flags & EDGE_EH))
4156 has_non_eh_pred = true;
4158 /* Find the handler that encloses the empty handler by looking at
4159 where the RESX instruction was vectored. */
4160 new_lp_nr = lookup_stmt_eh_lp (resx);
4161 new_region = get_eh_region_from_lp_number (new_lp_nr);
4163 /* If there's no destination region within the current function,
4164 redirection is trivial via removing the throwing statements from
4165 the EH region, removing the EH edges, and allowing the block
4166 to go unreachable. */
4167 if (new_region == NULL)
4169 gcc_assert (e_out == NULL);
4170 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4171 if (e->flags & EDGE_EH)
4173 gimple stmt = last_stmt (e->src);
4174 remove_stmt_from_eh_lp (stmt);
4175 remove_edge (e);
4177 else
4178 ei_next (&ei);
4179 goto succeed;
4182 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4183 to handle the abort and allow the blocks to go unreachable. */
4184 if (new_region->type == ERT_MUST_NOT_THROW)
4186 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4187 if (e->flags & EDGE_EH)
4189 gimple stmt = last_stmt (e->src);
4190 remove_stmt_from_eh_lp (stmt);
4191 add_stmt_to_eh_lp (stmt, new_lp_nr);
4192 remove_edge (e);
4194 else
4195 ei_next (&ei);
4196 goto succeed;
4199 /* Try to redirect the EH edges and merge the PHIs into the destination
4200 landing pad block. If the merge succeeds, we'll already have redirected
4201 all the EH edges. The handler itself will go unreachable if there were
4202 no normal edges. */
4203 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
4204 goto succeed;
4206 /* Finally, if all input edges are EH edges, then we can (potentially)
4207 reduce the number of transfers from the runtime by moving the landing
4208 pad from the original region to the new region. This is a win when
4209 we remove the last CLEANUP region along a particular exception
4210 propagation path. Since nothing changes except for the region with
4211 which the landing pad is associated, the PHI nodes do not need to be
4212 adjusted at all. */
4213 if (!has_non_eh_pred)
4215 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4216 if (dump_file && (dump_flags & TDF_DETAILS))
4217 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4218 lp->index, new_region->index);
4220 /* ??? The CFG didn't change, but we may have rendered the
4221 old EH region unreachable. Trigger a cleanup there. */
4222 return true;
4225 return ret;
4227 succeed:
4228 if (dump_file && (dump_flags & TDF_DETAILS))
4229 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4230 remove_eh_landing_pad (lp);
4231 return true;
4234 /* Do a post-order traversal of the EH region tree. Examine each
4235 post_landing_pad block and see if we can eliminate it as empty. */
4237 static bool
4238 cleanup_all_empty_eh (void)
4240 bool changed = false;
4241 eh_landing_pad lp;
4242 int i;
4244 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
4245 if (lp)
4246 changed |= cleanup_empty_eh (lp);
4248 return changed;
4251 /* Perform cleanups and lowering of exception handling:
4252 1) Cleanup regions with handlers doing nothing are optimized out.
4253 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out.
4254 3) Info about regions that contain instructions, and regions
4255 reachable via local EH edges, is collected.
4256 4) The EH tree is pruned for regions no longer necessary.
4258 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4259 Unify those that have the same failure decl and locus. */
4262 static unsigned int
4263 execute_cleanup_eh_1 (void)
4265 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4266 looking up unreachable landing pads. */
4267 remove_unreachable_handlers ();
4269 /* Watch out for the region tree vanishing because everything in it was unreachable. */
4270 if (cfun->eh->region_tree && optimize)
4272 bool changed = false;
4274 changed |= unsplit_all_eh ();
4275 changed |= cleanup_all_empty_eh ();
4277 if (changed)
4279 free_dominance_info (CDI_DOMINATORS);
4280 free_dominance_info (CDI_POST_DOMINATORS);
4282 /* We delayed all basic block deletion, as we may have performed
4283 cleanups on EH edges while non-EH edges were still present. */
4284 delete_unreachable_blocks ();
4286 /* We manipulated the landing pads. Remove any region that no
4287 longer has a landing pad. */
4288 remove_unreachable_handlers_no_lp ();
4290 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4294 return 0;
4297 static unsigned int
4298 execute_cleanup_eh (void)
4300 int ret = execute_cleanup_eh_1 ();
4302 /* If the function no longer needs an EH personality routine,
4303 clear it. This exposes cross-language inlining opportunities
4304 and avoids references to a never-defined personality routine. */
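/* For example, a function whose remaining calls cannot throw after
   inlining may still carry a language-specific personality inherited from
   an inlined body; dropping it lets the function itself be inlined across
   language boundaries.  */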
4305 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4306 && function_needs_eh_personality (cfun) != eh_personality_lang)
4307 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4309 return ret;
4312 static bool
4313 gate_cleanup_eh (void)
4315 return cfun->eh != NULL && cfun->eh->region_tree != NULL;
4318 struct gimple_opt_pass pass_cleanup_eh = {
4320 GIMPLE_PASS,
4321 "ehcleanup", /* name */
4322 gate_cleanup_eh, /* gate */
4323 execute_cleanup_eh, /* execute */
4324 NULL, /* sub */
4325 NULL, /* next */
4326 0, /* static_pass_number */
4327 TV_TREE_EH, /* tv_id */
4328 PROP_gimple_lcf, /* properties_required */
4329 0, /* properties_provided */
4330 0, /* properties_destroyed */
4331 0, /* todo_flags_start */
4332 0 /* todo_flags_finish */
4336 /* Verify that BB, which contains STMT as its last statement, has precisely
4337 the edges that make_eh_edges would create. */
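/* That is: if no landing pad is recorded for STMT there must be no EH
   successor edge; if one is recorded, STMT must be able to throw and BB
   must have exactly one EH edge, leading to that landing pad's
   post-landing-pad block.  */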
4339 DEBUG_FUNCTION bool
4340 verify_eh_edges (gimple stmt)
4342 basic_block bb = gimple_bb (stmt);
4343 eh_landing_pad lp = NULL;
4344 int lp_nr;
4345 edge_iterator ei;
4346 edge e, eh_edge;
4348 lp_nr = lookup_stmt_eh_lp (stmt);
4349 if (lp_nr > 0)
4350 lp = get_eh_landing_pad_from_number (lp_nr);
4352 eh_edge = NULL;
4353 FOR_EACH_EDGE (e, ei, bb->succs)
4355 if (e->flags & EDGE_EH)
4357 if (eh_edge)
4359 error ("BB %i has multiple EH edges", bb->index);
4360 return true;
4362 else
4363 eh_edge = e;
4367 if (lp == NULL)
4369 if (eh_edge)
4371 error ("BB %i cannot throw but has an EH edge", bb->index);
4372 return true;
4374 return false;
4377 if (!stmt_could_throw_p (stmt))
4379 error ("BB %i last statement has incorrectly set lp", bb->index);
4380 return true;
4383 if (eh_edge == NULL)
4385 error ("BB %i is missing an EH edge", bb->index);
4386 return true;
4389 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4391 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4392 return true;
4395 return false;
4398 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
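/* For a TRY region the dispatch block must have an edge to each catch
   label, plus a fallthru edge unless one of the handlers is a catch-all;
   for an ALLOWED_EXCEPTIONS region it must have an edge to the failure
   label and a fallthru edge.  No other successor edges are allowed.  */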
4400 DEBUG_FUNCTION bool
4401 verify_eh_dispatch_edge (gimple stmt)
4403 eh_region r;
4404 eh_catch c;
4405 basic_block src, dst;
4406 bool want_fallthru = true;
4407 edge_iterator ei;
4408 edge e, fall_edge;
4410 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4411 src = gimple_bb (stmt);
4413 FOR_EACH_EDGE (e, ei, src->succs)
4414 gcc_assert (e->aux == NULL);
4416 switch (r->type)
4418 case ERT_TRY:
4419 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4421 dst = label_to_block (c->label);
4422 e = find_edge (src, dst);
4423 if (e == NULL)
4425 error ("BB %i is missing an edge", src->index);
4426 return true;
4428 e->aux = (void *)e;
4430 /* A catch-all handler doesn't have a fallthru. */
4431 if (c->type_list == NULL)
4433 want_fallthru = false;
4434 break;
4437 break;
4439 case ERT_ALLOWED_EXCEPTIONS:
4440 dst = label_to_block (r->u.allowed.label);
4441 e = find_edge (src, dst);
4442 if (e == NULL)
4444 error ("BB %i is missing an edge", src->index);
4445 return true;
4447 e->aux = (void *)e;
4448 break;
4450 default:
4451 gcc_unreachable ();
4454 fall_edge = NULL;
4455 FOR_EACH_EDGE (e, ei, src->succs)
4457 if (e->flags & EDGE_FALLTHRU)
4459 if (fall_edge != NULL)
4461 error ("BB %i has too many fallthru edges", src->index);
4462 return true;
4464 fall_edge = e;
4466 else if (e->aux)
4467 e->aux = NULL;
4468 else
4470 error ("BB %i has incorrect edge", src->index);
4471 return true;
4474 if ((fall_edge != NULL) ^ want_fallthru)
4476 error ("BB %i has incorrect fallthru edge", src->index);
4477 return true;
4480 return false;