gcc/tree-eh.c
/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "ggc.h"
#include "diagnostic-core.h"
#include "gimple.h"
#include "target.h"
#include "cfgloop.h"
/* In some instances a tree and a gimple need to be stored in the same
   table, e.g. in hash tables.  This is a structure to do this.  */
typedef union {tree *tp; tree t; gimple g;} treemple;
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
/* Add statement T in function IFUN to landing pad NUM.  */

void
add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num != 0);

  n = ggc_alloc_throw_stmt_node ();
  n->stmt = t;
  n->lp_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
                                                    struct_ptr_eq,
                                                    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

void
add_stmt_to_eh_lp (gimple t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}

/* Add statement T to the single EH landing pad in REGION.  */

static void
record_stmt_eh_region (eh_region region, gimple t)
{
  if (region == NULL)
    return;
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
        lp = gen_eh_landing_pad (region);
      else
        gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}

/* Remove statement T in function IFUN from its EH landing pad.  */

bool
remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                         NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}

/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}
/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */

int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node *p, n;

  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
  return p ? p->lp_nr : 0;
}

/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}
/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gimple parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
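/* Record a CHILD -> PARENT edge in FINALLY_TREE: CHILD is either a
   GIMPLE_TRY_FINALLY node or a LABEL_DECL defined inside one, and PARENT
   is the innermost enclosing GIMPLE_TRY_FINALLY (possibly NULL).  */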
static void
record_in_finally_tree (treemple child, gimple parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (gimple stmt, gimple region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gimple region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}
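/* Record any LABEL_DECLs and nested GIMPLE_TRY_FINALLY nodes found in STMT
   into FINALLY_TREE, with REGION as the innermost enclosing
   GIMPLE_TRY_FINALLY seen so far.  */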
static void
collect_finally_tree (gimple stmt, gimple region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        {
          temp.g = stmt;
          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        {
          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (stmt), region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    case GIMPLE_EH_ELSE:
      collect_finally_tree_1 (gimple_eh_else_n_body (stmt), region);
      collect_finally_tree_1 (gimple_eh_else_e_body (stmt), region);
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
        return true;
      start.g = p->parent;
    }
  while (start.g != target);

  return false;
}
/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */
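/* A rough outline of this pass (an informal sketch): lower_eh_constructs_1
   feeds each statement to lower_eh_constructs_2.  GIMPLE_TRY_FINALLY goes
   through lower_try_finally, which picks one of the _nofallthru, _onedest,
   _copy or _switch strategies below; a GIMPLE_TRY_CATCH whose cleanup is a
   GIMPLE_CATCH, GIMPLE_EH_FILTER or GIMPLE_EH_MUST_NOT_THROW goes through
   lower_catch, lower_eh_filter or lower_eh_must_not_throw respectively, and
   anything else through lower_cleanup.  Landing-pad code accumulates in
   eh_seq below.  */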
/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;

/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */

struct goto_queue_node
{
  treemple stmt;
  location_t location;
  gimple_seq repl_stmt;
  gimple cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  int is_label;
};
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;
  gimple top_p;

  /* While lowering, top_p is usually expanded into multiple statements,
     so we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);
/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20
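/* Above this many entries, find_goto_replacement stops scanning the goto
   queue linearly and builds a pointer map keyed on the original statement
   instead; the exact threshold is only a heuristic.  */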
static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);

static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;
  void **slot;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
        {
          slot = pointer_map_insert (tf->goto_queue_map,
                                     tf->goto_queue[i].stmt.g);
          gcc_assert (*slot == NULL);
          *slot = &tf->goto_queue[i];
        }
    }

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
  if (slot != NULL)
    return (((struct goto_queue_node *) *slot)->repl_stmt);

  return NULL;
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
        {
          gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
          gsi_remove (gsi, false);
          return;
        }
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler_ptr (stmt), tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
      break;
    case GIMPLE_EH_ELSE:
      replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (stmt), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}

/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (*seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (&eh_seq, tf);
}

/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple return.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
                      treemple new_stmt,
                      int index,
                      bool is_label,
                      location_t location)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->location = location;
  q->is_label = is_label;
}
/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
                            location_t location)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array)
    {
      tf->dest_array = VEC_alloc (tree, heap, 10);
      VEC_quick_push (tree, tf->dest_array, label);
      index = 0;
    }
  else
    {
      int n = VEC_length (tree, tf->dest_array);
      for (index = 0; index < n; ++index)
        if (VEC_index (tree, tf->dest_array, index) == label)
          break;
      if (index == n)
        VEC_safe_push (tree, heap, tf->dest_array, label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true, location);
}
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt),
                                  EXPR_LOCATION (*new_stmt.tp));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt),
                                  EXPR_LOCATION (*new_stmt.tp));
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
                                  gimple_location (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
      break;

    default:
      gcc_unreachable ();
    }
}


#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

	int x;
	int foo (void)
	{
	  x = 0;
	  try {
	    return x;
	  } finally {
	    x++;
	  }
	}

     should return 0, not 1.  We don't have to do anything to make
     this happen because the return value has been placed in the
     RESULT_DECL already.  */

  q->cont_stmt = q->stmt.g;

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)
{
  gimple x;

  gcc_assert (q->is_label);

  q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

/* Emit a standard landing pad sequence into SEQ for REGION.  */

static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
  eh_landing_pad lp = region->landing_pads;
  gimple x;

  if (lp == NULL)
    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
}

/* Emit a RESX statement into SEQ for REGION.  */

static void
emit_resx (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  if (region->outer)
    record_stmt_eh_region (region->outer, x);
}

/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);
}

/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      if (region->type == ERT_MUST_NOT_THROW)
        break;
      region = region->outer;
      if (region == NULL)
        break;
    }
}

/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

static inline bool
eh_region_may_contain_throw (eh_region r)
{
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
}
/* We want to transform
	try { body; } catch { stuff; }
   to
	normal_sequence:
	  body;
	  over:
	eh_sequence:
	  landing_pad:
	  stuff;
	  goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gimple tp, eh_region region, tree over)
{
  gimple x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
        over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
                             location_t loc)
{
  gimple region = NULL;
  gimple_seq new_seq;
  gimple_stmt_iterator gsi;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
    if (gimple_location (gsi_stmt (gsi)) == UNKNOWN_LOCATION)
      gimple_set_location (gsi_stmt (gsi), loc);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
        {
          temp.t = label;
          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
        }
    }
  return label;
}
/* A subroutine of lower_try_finally.  If FINALLY consists of a
   GIMPLE_EH_ELSE node, return it.  */

static inline gimple
get_eh_else (gimple_seq finally)
{
  gimple x = gimple_seq_first_stmt (finally);
  if (gimple_code (x) == GIMPLE_EH_ELSE)
    {
      gcc_assert (gimple_seq_singleton_p (finally));
      return x;
    }
  return NULL;
}
/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x, eh_else;

  /* First check for nothing to do.  */
  if (lang_hooks.eh_protect_cleanup_actions == NULL)
    return;
  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)
    return;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  If we've got an EH_ELSE, extract it now.  */
  if (eh_else)
    {
      finally = gimple_eh_else_e_body (eh_else);
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
    }
  else if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state,
                                           UNKNOWN_LOCATION);
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
                        GIMPLE_TRY_CATCH);
  finally = lower_eh_must_not_throw (outer_state, x);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)
{
  tree lab;
  gimple x, eh_else;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
        {
          finally = gimple_eh_else_e_body (eh_else);
          lower_eh_constructs_1 (state, &finally);

          emit_post_landing_pad (&eh_seq, tf->region);
          gimple_seq_add_seq (&eh_seq, finally);
        }
    }
  else
    {
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
        {
          emit_post_landing_pad (&eh_seq, tf->region);

          x = gimple_build_goto (lab);
          gimple_seq_add_stmt (&eh_seq, x);
        }
    }
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  gimple x;
  gimple_seq finally;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  x = get_eh_else (finally);
  if (x)
    {
      if (tf->may_throw)
        finally = gimple_eh_else_e_body (x);
      else
        finally = gimple_eh_else_n_body (x);
    }

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
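/* For example (an illustrative sketch, not literal output): with a fallthru
   edge and one goto leaving the try block, the finally body F ends up
   emitted once per destination, roughly

	<try body>
	F; goto <fallthru label>;
      L1:
	F; goto <original goto destination>;

   plus, when the region can throw, one more copy of F on the EH path
   followed by a RESX.  */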
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple x, eh_else;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);

  /* Notice EH_ELSE, and simplify some of the remaining code
     by considering FINALLY to be the normal return path only.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    finally = gimple_eh_else_n_body (eh_else);

  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      /* We don't need to copy the EH path of EH_ELSE,
         since it is only emitted once.  */
      if (eh_else)
        seq = gimple_eh_else_e_body (eh_else);
      else
        seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label
            = create_artificial_label (tf_loc);

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
          else
            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state, q->location);
          lower_eh_constructs_1 (state, &seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
          else
            do_goto_redirection (q, lab, NULL, tf);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
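/* For example (an illustrative sketch, not literal output):

	<try body>
	finally_tmp = fallthru_index;
      finally_label:
	<finally body>
	switch (finally_tmp)
	  {
	  case fallthru_index: goto <fallthru label>;
	  case 0: goto <original destination 0>;
	  ...
	  }

   Each redirected goto/return assigns its own index to finally_tmp and
   jumps to finally_label; the EH path does likewise and its switch case
   ends in a RESX.  */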
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  VEC (tree,heap) *case_label_vec;
  gimple_seq switch_body = NULL;
  gimple x, eh_else;
  tree tmp;
  gimple switch_stmt;
  gimple_seq finally;
  struct pointer_map_t *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Mash the TRY block to the head of the chain.  */
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  x = gimple_seq_last_stmt (finally);
  finally_loc = x ? gimple_location (x) : tf_loc;

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + (tf->may_throw && !eh_else);
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use VEC_quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec = VEC_alloc (tree, heap, ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node,
                                              fallthru_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&switch_body, x);
    }

  /* For EH_ELSE, emit the exception path (plus resx) now, then
     subsequently we only need consider the normal path.  */
  if (eh_else)
    {
      if (tf->may_throw)
        {
          finally = gimple_eh_else_e_body (eh_else);
          lower_eh_constructs_1 (state, &finally);

          emit_post_landing_pad (&eh_seq, tf->region);
          gimple_seq_add_seq (&eh_seq, finally);
          emit_resx (&eh_seq, tf->region);
        }

      finally = gimple_eh_else_n_body (eh_else);
    }
  else if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod = NULL;
      int switch_id;
      unsigned int case_index;

      if (q->index < 0)
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node,
                                                  return_index));
          gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod);
          switch_id = return_index;
        }
      else
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node, q->index));
          gimple_seq_add_stmt (&mod, x);
          do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (VEC_length (tree, case_label_vec) <= case_index
          || !VEC_index (tree, case_label_vec, case_index))
        {
          tree case_lab;
          void **slot;
          tmp = build_int_cst (integer_type_node, switch_id);
          case_lab = build_case_label (tmp, NULL,
                                       create_artificial_label (tf_loc));
          /* We store the cont_stmt in the pointer map, so that we can recover
             it in the loop below.  */
          if (!cont_map)
            cont_map = pointer_map_create ();
          slot = pointer_map_insert (cont_map, case_lab);
          *slot = q->cont_stmt;
          VEC_quick_push (tree, case_label_vec, case_lab);
        }
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      gimple cont_stmt;
      void **slot;

      last_case = VEC_index (tree, case_label_vec, j);

      gcc_assert (last_case);
      gcc_assert (cont_map);

      slot = pointer_map_contains (cont_map, last_case);
      gcc_assert (slot);
      cont_stmt = *(gimple *) slot;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    pointer_map_destroy (cont_map);

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch (finally_tmp, last_case,
                                     case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
{
  int f_estimate, sw_estimate;
  gimple eh_else;

  /* If there's an EH_ELSE involved, the exception path is separate
     and really doesn't come into play for this computation.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      ndests -= may_throw;
      finally = gimple_eh_else_n_body (eh_else);
    }

  if (!optimize)
    {
      gimple_stmt_iterator gsi;

      if (ndests == 1)
        return true;

      for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
            return false;
        }
      return true;
    }

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}

/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */

static bool
cleanup_is_dead_in (eh_region reg)
{
  while (reg && reg->type == ERT_CLEANUP)
    reg = reg->outer;
  return (reg && reg->type == ERT_MUST_NOT_THROW);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static gimple_seq
lower_try_finally (struct leh_state *state, gimple tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region))
    {
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
    }
  else
    {
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;
    }

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  old_eh_seq = eh_seq;
  eh_seq = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (this_tf.region)
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, this_tf.may_throw,
                                    gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */
  if (old_eh_seq)
    {
      if (eh_seq == NULL)
        eh_seq = old_eh_seq;
      else
        {
          gimple_seq new_eh_seq = eh_seq;
          eh_seq = old_eh_seq;
          gimple_seq_add_seq (&eh_seq, new_eh_seq);
        }
    }

  return this_tf.top_p_seq;
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static gimple_seq
lower_catch (struct leh_state *state, gimple tp)
{
  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;
  tree out_label;
  gimple_seq new_seq, cleanup;
  gimple x;
  location_t try_catch_loc = gimple_location (tp);

  if (flag_exceptions)
    {
      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  emit_eh_dispatch (&new_seq, try_region);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = try_region;

  out_label = NULL;
  cleanup = gimple_try_cleanup (tp);
  for (gsi = gsi_start (cleanup);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      eh_catch c;
      gimple gcatch;
      gimple_seq handler;

      gcatch = gsi_stmt (gsi);
      c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));

      handler = gimple_catch_handler (gcatch);
      lower_eh_constructs_1 (&this_state, &handler);

      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      if (gimple_seq_may_fallthru (new_seq))
        {
          if (!out_label)
            out_label = create_artificial_label (try_catch_loc);

          x = gimple_build_goto (out_label);
          gimple_seq_add_stmt (&new_seq, x);
        }
      if (!c->type_list)
        break;
    }

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, try_region, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static gimple_seq
lower_eh_filter (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  gimple inner, x;
  gimple_seq new_seq;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)
    {
      this_region = gen_eh_region_allowed (state->cur_region,
                                           gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  emit_eh_dispatch (&new_seq, this_region);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;

  if (flag_exceptions)
    {
      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
        = gimple_eh_must_not_throw_fndecl (inner);
      this_region->u.must_not_throw.failure_loc = gimple_location (tp);

      /* In order to get mangling applied to this decl, we must mark it
         used now.  Otherwise, pass_ipa_free_lang_data won't think it
         needs to happen.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  return gimple_try_eval (tp);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static gimple_seq
lower_cleanup (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;
  gimple_seq result;
  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);

  if (flag_exceptions && !cleanup_dead)
    {
      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
      result = frob_into_branch_around (tp, this_region,
                                        fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
        {
          gimple x = gimple_build_label (fake_tf.fallthru_label);
          gimple_seq_add_stmt (&result, x);
        }
    }
  return result;
}
1906 /* Main loop for lowering eh constructs. Also moves gsi to the next
1907 statement. */
1909 static void
1910 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1912 gimple_seq replace;
1913 gimple x;
1914 gimple stmt = gsi_stmt (*gsi);
1916 switch (gimple_code (stmt))
1918 case GIMPLE_CALL:
1920 tree fndecl = gimple_call_fndecl (stmt);
1921 tree rhs, lhs;
1923 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1924 switch (DECL_FUNCTION_CODE (fndecl))
1926 case BUILT_IN_EH_POINTER:
1927 /* The front end may have generated a call to
1928 __builtin_eh_pointer (0) within a catch region. Replace
1929 this zero argument with the current catch region number. */
1930 if (state->ehp_region)
1932 tree nr = build_int_cst (integer_type_node,
1933 state->ehp_region->index);
1934 gimple_call_set_arg (stmt, 0, nr);
1936 else
1938 /* The user has dome something silly. Remove it. */
1939 rhs = null_pointer_node;
1940 goto do_replace;
1942 break;
1944 case BUILT_IN_EH_FILTER:
1945 /* ??? This should never appear, but since it's a builtin it
1946 is accessible to abuse by users. Just remove it and
1947 replace the use with the arbitrary value zero. */
1948 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
1949 do_replace:
1950 lhs = gimple_call_lhs (stmt);
1951 x = gimple_build_assign (lhs, rhs);
1952 gsi_insert_before (gsi, x, GSI_SAME_STMT);
1953 /* FALLTHRU */
1955 case BUILT_IN_EH_COPY_VALUES:
1956 /* Likewise this should not appear. Remove it. */
1957 gsi_remove (gsi, true);
1958 return;
1960 default:
1961 break;
1964 /* FALLTHRU */
1966 case GIMPLE_ASSIGN:
1967 /* If the stmt can throw use a new temporary for the assignment
1968 to a LHS. This makes sure the old value of the LHS is
1969 available on the EH edge. Only do so for statements that
1970 potentially fall through (no noreturn calls e.g.), otherwise
1971 this new assignment might create fake fallthru regions. */
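      /* Illustrative sketch of the split performed below (assumed shape,
         not verbatim GIMPLE): a throwing assignment such as
           x = foo ();      [may throw]
         becomes
           tmp = foo ();    [may throw]
           x = tmp;
         so the EH edge out of the call still sees the old value of x,
         while only the fallthru path observes the new value.  */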
1972 if (stmt_could_throw_p (stmt)
1973 && gimple_has_lhs (stmt)
1974 && gimple_stmt_may_fallthru (stmt)
1975 && !tree_could_throw_p (gimple_get_lhs (stmt))
1976 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
1978 tree lhs = gimple_get_lhs (stmt);
1979 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
1980 gimple s = gimple_build_assign (lhs, tmp);
1981 gimple_set_location (s, gimple_location (stmt));
1982 gimple_set_block (s, gimple_block (stmt));
1983 gimple_set_lhs (stmt, tmp);
1984 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
1985 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
1986 DECL_GIMPLE_REG_P (tmp) = 1;
1987 gsi_insert_after (gsi, s, GSI_SAME_STMT);
1989 /* Look for things that can throw exceptions, and record them. */
1990 if (state->cur_region && stmt_could_throw_p (stmt))
1992 record_stmt_eh_region (state->cur_region, stmt);
1993 note_eh_region_may_contain_throw (state->cur_region);
1995 break;
1997 case GIMPLE_COND:
1998 case GIMPLE_GOTO:
1999 case GIMPLE_RETURN:
2000 maybe_record_in_goto_queue (state, stmt);
2001 break;
2003 case GIMPLE_SWITCH:
2004 verify_norecord_switch_expr (state, stmt);
2005 break;
2007 case GIMPLE_TRY:
2008 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
2009 replace = lower_try_finally (state, stmt);
2010 else
2012 x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
2013 if (!x)
2015 replace = gimple_try_eval (stmt);
2016 lower_eh_constructs_1 (state, &replace);
2018 else
2019 switch (gimple_code (x))
2021 case GIMPLE_CATCH:
2022 replace = lower_catch (state, stmt);
2023 break;
2024 case GIMPLE_EH_FILTER:
2025 replace = lower_eh_filter (state, stmt);
2026 break;
2027 case GIMPLE_EH_MUST_NOT_THROW:
2028 replace = lower_eh_must_not_throw (state, stmt);
2029 break;
2030 case GIMPLE_EH_ELSE:
2031 /* This code is only valid with GIMPLE_TRY_FINALLY. */
2032 gcc_unreachable ();
2033 default:
2034 replace = lower_cleanup (state, stmt);
2035 break;
2039 /* Remove the old stmt and insert the transformed sequence
2040 instead. */
2041 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
2042 gsi_remove (gsi, true);
2044 /* Return since we don't want gsi_next () */
2045 return;
2047 case GIMPLE_EH_ELSE:
2048 /* We should be eliminating this in lower_try_finally et al. */
2049 gcc_unreachable ();
2051 default:
2052 /* A type, a decl, or some kind of statement that we're not
2053 interested in. Don't walk them. */
2054 break;
2057 gsi_next (gsi);
2060 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
2062 static void
2063 lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
2065 gimple_stmt_iterator gsi;
2066 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
2067 lower_eh_constructs_2 (state, &gsi);
2070 static unsigned int
2071 lower_eh_constructs (void)
2073 struct leh_state null_state;
2074 gimple_seq bodyp;
2076 bodyp = gimple_body (current_function_decl);
2077 if (bodyp == NULL)
2078 return 0;
2080 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
2081 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
2082 memset (&null_state, 0, sizeof (null_state));
2084 collect_finally_tree_1 (bodyp, NULL);
2085 lower_eh_constructs_1 (&null_state, &bodyp);
2086 gimple_set_body (current_function_decl, bodyp);
2088 /* We assume there's a return statement, or something, at the end of
2089      the function, and thus plopping the EH sequence afterward won't
2090 change anything. */
2091 gcc_assert (!gimple_seq_may_fallthru (bodyp));
2092 gimple_seq_add_seq (&bodyp, eh_seq);
2094 /* We assume that since BODYP already existed, adding EH_SEQ to it
2095 didn't change its value, and we don't have to re-set the function. */
2096 gcc_assert (bodyp == gimple_body (current_function_decl));
2098 htab_delete (finally_tree);
2099 BITMAP_FREE (eh_region_may_contain_throw_map);
2100 eh_seq = NULL;
2102 /* If this function needs a language specific EH personality routine
2103      and the frontend didn't already set one, do so now.  */
2104 if (function_needs_eh_personality (cfun) == eh_personality_lang
2105 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2106 DECL_FUNCTION_PERSONALITY (current_function_decl)
2107 = lang_hooks.eh_personality ();
2109 return 0;
2112 struct gimple_opt_pass pass_lower_eh =
2115 GIMPLE_PASS,
2116 "eh", /* name */
2117 NULL, /* gate */
2118 lower_eh_constructs, /* execute */
2119 NULL, /* sub */
2120 NULL, /* next */
2121 0, /* static_pass_number */
2122 TV_TREE_EH, /* tv_id */
2123 PROP_gimple_lcf, /* properties_required */
2124 PROP_gimple_leh, /* properties_provided */
2125 0, /* properties_destroyed */
2126 0, /* todo_flags_start */
2127 0 /* todo_flags_finish */
2131 /* Create the multiple edges from an EH_DISPATCH statement to all of
2132 the possible handlers for its EH region. Return true if there's
2133 no fallthru edge; false if there is. */
2135 bool
2136 make_eh_dispatch_edges (gimple stmt)
2138 eh_region r;
2139 eh_catch c;
2140 basic_block src, dst;
2142 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2143 src = gimple_bb (stmt);
2145 switch (r->type)
2147 case ERT_TRY:
2148 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2150 dst = label_to_block (c->label);
2151 make_edge (src, dst, 0);
2153 /* A catch-all handler doesn't have a fallthru. */
2154 if (c->type_list == NULL)
2155 return false;
2157 break;
2159 case ERT_ALLOWED_EXCEPTIONS:
2160 dst = label_to_block (r->u.allowed.label);
2161 make_edge (src, dst, 0);
2162 break;
2164 default:
2165 gcc_unreachable ();
2168 return true;
2171 /* Create the single EH edge from STMT to its nearest landing pad,
2172 if there is such a landing pad within the current function. */
2174 void
2175 make_eh_edges (gimple stmt)
2177 basic_block src, dst;
2178 eh_landing_pad lp;
2179 int lp_nr;
2181 lp_nr = lookup_stmt_eh_lp (stmt);
2182 if (lp_nr <= 0)
2183 return;
2185 lp = get_eh_landing_pad_from_number (lp_nr);
2186 gcc_assert (lp != NULL);
2188 src = gimple_bb (stmt);
2189 dst = label_to_block (lp->post_landing_pad);
2190 make_edge (src, dst, EDGE_EH);
2193 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2194 do not actually perform the final edge redirection.
2196 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2197 we intend to change the destination EH region as well; this means
2198 EH_LANDING_PAD_NR must already be set on the destination block label.
2199 If false, we're being called from generic cfg manipulation code and we
2200 should preserve our place within the region tree. */
2202 static void
2203 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2205 eh_landing_pad old_lp, new_lp;
2206 basic_block old_bb;
2207 gimple throw_stmt;
2208 int old_lp_nr, new_lp_nr;
2209 tree old_label, new_label;
2210 edge_iterator ei;
2211 edge e;
2213 old_bb = edge_in->dest;
2214 old_label = gimple_block_label (old_bb);
2215 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2216 gcc_assert (old_lp_nr > 0);
2217 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2219 throw_stmt = last_stmt (edge_in->src);
2220 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2222 new_label = gimple_block_label (new_bb);
2224 /* Look for an existing region that might be using NEW_BB already. */
2225 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2226 if (new_lp_nr)
2228 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2229 gcc_assert (new_lp);
2231 /* Unless CHANGE_REGION is true, the new and old landing pad
2232 had better be associated with the same EH region. */
2233 gcc_assert (change_region || new_lp->region == old_lp->region);
2235 else
2237 new_lp = NULL;
2238 gcc_assert (!change_region);
2241 /* Notice when we redirect the last EH edge away from OLD_BB. */
2242 FOR_EACH_EDGE (e, ei, old_bb->preds)
2243 if (e != edge_in && (e->flags & EDGE_EH))
2244 break;
2246 if (new_lp)
2248 /* NEW_LP already exists. If there are still edges into OLD_LP,
2249 there's nothing to do with the EH tree. If there are no more
2250 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2251 If CHANGE_REGION is true, then our caller is expecting to remove
2252 the landing pad. */
2253 if (e == NULL && !change_region)
2254 remove_eh_landing_pad (old_lp);
2256 else
2258 /* No correct landing pad exists. If there are no more edges
2259 into OLD_LP, then we can simply re-use the existing landing pad.
2260 Otherwise, we have to create a new landing pad. */
2261 if (e == NULL)
2263 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2264 new_lp = old_lp;
2266 else
2267 new_lp = gen_eh_landing_pad (old_lp->region);
2268 new_lp->post_landing_pad = new_label;
2269 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2272 /* Maybe move the throwing statement to the new region. */
2273 if (old_lp != new_lp)
2275 remove_stmt_from_eh_lp (throw_stmt);
2276 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2280 /* Redirect EH edge E to NEW_BB. */
2282 edge
2283 redirect_eh_edge (edge edge_in, basic_block new_bb)
2285 redirect_eh_edge_1 (edge_in, new_bb, false);
2286 return ssa_redirect_edge (edge_in, new_bb);
2289 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2290 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2291 The actual edge update will happen in the caller. */
2293 void
2294 redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
2296 tree new_lab = gimple_block_label (new_bb);
2297 bool any_changed = false;
2298 basic_block old_bb;
2299 eh_region r;
2300 eh_catch c;
2302 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2303 switch (r->type)
2305 case ERT_TRY:
2306 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2308 old_bb = label_to_block (c->label);
2309 if (old_bb == e->dest)
2311 c->label = new_lab;
2312 any_changed = true;
2315 break;
2317 case ERT_ALLOWED_EXCEPTIONS:
2318 old_bb = label_to_block (r->u.allowed.label);
2319 gcc_assert (old_bb == e->dest);
2320 r->u.allowed.label = new_lab;
2321 any_changed = true;
2322 break;
2324 default:
2325 gcc_unreachable ();
2328 gcc_assert (any_changed);
2331 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2333 bool
2334 operation_could_trap_helper_p (enum tree_code op,
2335 bool fp_operation,
2336 bool honor_trapv,
2337 bool honor_nans,
2338 bool honor_snans,
2339 tree divisor,
2340 bool *handled)
2342 *handled = true;
2343 switch (op)
2345 case TRUNC_DIV_EXPR:
2346 case CEIL_DIV_EXPR:
2347 case FLOOR_DIV_EXPR:
2348 case ROUND_DIV_EXPR:
2349 case EXACT_DIV_EXPR:
2350 case CEIL_MOD_EXPR:
2351 case FLOOR_MOD_EXPR:
2352 case ROUND_MOD_EXPR:
2353 case TRUNC_MOD_EXPR:
2354 case RDIV_EXPR:
2355 if (honor_snans || honor_trapv)
2356 return true;
2357 if (fp_operation)
2358 return flag_trapping_math;
2359 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2360 return true;
2361 return false;
2363 case LT_EXPR:
2364 case LE_EXPR:
2365 case GT_EXPR:
2366 case GE_EXPR:
2367 case LTGT_EXPR:
2368 /* Some floating point comparisons may trap. */
2369 return honor_nans;
2371 case EQ_EXPR:
2372 case NE_EXPR:
2373 case UNORDERED_EXPR:
2374 case ORDERED_EXPR:
2375 case UNLT_EXPR:
2376 case UNLE_EXPR:
2377 case UNGT_EXPR:
2378 case UNGE_EXPR:
2379 case UNEQ_EXPR:
2380 return honor_snans;
2382 case CONVERT_EXPR:
2383 case FIX_TRUNC_EXPR:
2384 /* Conversion of floating point might trap. */
2385 return honor_nans;
2387 case NEGATE_EXPR:
2388 case ABS_EXPR:
2389 case CONJ_EXPR:
2390 /* These operations don't trap with floating point. */
2391 if (honor_trapv)
2392 return true;
2393 return false;
2395 case PLUS_EXPR:
2396 case MINUS_EXPR:
2397 case MULT_EXPR:
2398 /* Any floating arithmetic may trap. */
2399 if (fp_operation && flag_trapping_math)
2400 return true;
2401 if (honor_trapv)
2402 return true;
2403 return false;
2405 case COMPLEX_EXPR:
2406 case CONSTRUCTOR:
2407 /* Constructing an object cannot trap. */
2408 return false;
2410 default:
2411 /* Any floating arithmetic may trap. */
2412 if (fp_operation && flag_trapping_math)
2413 return true;
2415 *handled = false;
2416 return false;
2420 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2421 on floating-point values. HONOR_TRAPV is true if OP is applied on integer
2422 type operands that may trap. If OP is a division operator, DIVISOR contains
2423 the value of the divisor. */
2425 bool
2426 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2427 tree divisor)
2429 bool honor_nans = (fp_operation && flag_trapping_math
2430 && !flag_finite_math_only);
2431 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2432 bool handled;
2434 if (TREE_CODE_CLASS (op) != tcc_comparison
2435 && TREE_CODE_CLASS (op) != tcc_unary
2436 && TREE_CODE_CLASS (op) != tcc_binary)
2437 return false;
2439 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2440 honor_nans, honor_snans, divisor,
2441 &handled);
2444 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2445    location or floating point arithmetic.  Cf. the rtl version, may_trap_p.
2446 This routine expects only GIMPLE lhs or rhs input. */
2448 bool
2449 tree_could_trap_p (tree expr)
2451 enum tree_code code;
2452 bool fp_operation = false;
2453 bool honor_trapv = false;
2454 tree t, base, div = NULL_TREE;
2456 if (!expr)
2457 return false;
2459 code = TREE_CODE (expr);
2460 t = TREE_TYPE (expr);
2462 if (t)
2464 if (COMPARISON_CLASS_P (expr))
2465 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2466 else
2467 fp_operation = FLOAT_TYPE_P (t);
2468 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2471 if (TREE_CODE_CLASS (code) == tcc_binary)
2472 div = TREE_OPERAND (expr, 1);
2473 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2474 return true;
2476 restart:
2477 switch (code)
2479 case TARGET_MEM_REF:
2480 if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
2481 && !TMR_INDEX (expr) && !TMR_INDEX2 (expr))
2482 return false;
2483 return !TREE_THIS_NOTRAP (expr);
2485 case COMPONENT_REF:
2486 case REALPART_EXPR:
2487 case IMAGPART_EXPR:
2488 case BIT_FIELD_REF:
2489 case VIEW_CONVERT_EXPR:
2490 case WITH_SIZE_EXPR:
2491 expr = TREE_OPERAND (expr, 0);
2492 code = TREE_CODE (expr);
2493 goto restart;
2495 case ARRAY_RANGE_REF:
2496 base = TREE_OPERAND (expr, 0);
2497 if (tree_could_trap_p (base))
2498 return true;
2499 if (TREE_THIS_NOTRAP (expr))
2500 return false;
2501 return !range_in_array_bounds_p (expr);
2503 case ARRAY_REF:
2504 base = TREE_OPERAND (expr, 0);
2505 if (tree_could_trap_p (base))
2506 return true;
2507 if (TREE_THIS_NOTRAP (expr))
2508 return false;
2509 return !in_array_bounds_p (expr);
2511 case MEM_REF:
2512 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2513 return false;
2514 /* Fallthru. */
2515 case INDIRECT_REF:
2516 return !TREE_THIS_NOTRAP (expr);
2518 case ASM_EXPR:
2519 return TREE_THIS_VOLATILE (expr);
2521 case CALL_EXPR:
2522 t = get_callee_fndecl (expr);
2523 /* Assume that calls to weak functions may trap. */
2524 if (!t || !DECL_P (t))
2525 return true;
2526 if (DECL_WEAK (t))
2527 return tree_could_trap_p (t);
2528 return false;
2530 case FUNCTION_DECL:
2531 /* Assume that accesses to weak functions may trap, unless we know
2532 they are certainly defined in current TU or in some other
2533 LTO partition. */
2534 if (DECL_WEAK (expr))
2536 struct cgraph_node *node;
2537 if (!DECL_EXTERNAL (expr))
2538 return false;
2539 node = cgraph_function_node (cgraph_get_node (expr), NULL);
2540 if (node && node->symbol.in_other_partition)
2541 return false;
2542 return true;
2544 return false;
2546 case VAR_DECL:
2547 /* Assume that accesses to weak vars may trap, unless we know
2548 they are certainly defined in current TU or in some other
2549 LTO partition. */
2550 if (DECL_WEAK (expr))
2552 struct varpool_node *node;
2553 if (!DECL_EXTERNAL (expr))
2554 return false;
2555 node = varpool_variable_node (varpool_get_node (expr), NULL);
2556 if (node && node->symbol.in_other_partition)
2557 return false;
2558 return true;
2560 return false;
2562 default:
2563 return false;
2568 /* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be
2569 an assignment or a conditional) may throw. */
2571 static bool
2572 stmt_could_throw_1_p (gimple stmt)
2574 enum tree_code code = gimple_expr_code (stmt);
2575 bool honor_nans = false;
2576 bool honor_snans = false;
2577 bool fp_operation = false;
2578 bool honor_trapv = false;
2579 tree t;
2580 size_t i;
2581 bool handled, ret;
2583 if (TREE_CODE_CLASS (code) == tcc_comparison
2584 || TREE_CODE_CLASS (code) == tcc_unary
2585 || TREE_CODE_CLASS (code) == tcc_binary)
2587 if (is_gimple_assign (stmt)
2588 && TREE_CODE_CLASS (code) == tcc_comparison)
2589 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2590 else if (gimple_code (stmt) == GIMPLE_COND)
2591 t = TREE_TYPE (gimple_cond_lhs (stmt));
2592 else
2593 t = gimple_expr_type (stmt);
2594 fp_operation = FLOAT_TYPE_P (t);
2595 if (fp_operation)
2597 honor_nans = flag_trapping_math && !flag_finite_math_only;
2598 honor_snans = flag_signaling_nans != 0;
2600 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2601 honor_trapv = true;
2604 /* Check if the main expression may trap. */
2605 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2606 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2607 honor_nans, honor_snans, t,
2608 &handled);
2609 if (handled)
2610 return ret;
2612 /* If the expression does not trap, see if any of the individual operands may
2613 trap. */
2614 for (i = 0; i < gimple_num_ops (stmt); i++)
2615 if (tree_could_trap_p (gimple_op (stmt, i)))
2616 return true;
2618 return false;
2622 /* Return true if statement STMT could throw an exception. */
2624 bool
2625 stmt_could_throw_p (gimple stmt)
2627 if (!flag_exceptions)
2628 return false;
2630 /* The only statements that can throw an exception are assignments,
2631 conditionals, calls, resx, and asms. */
2632 switch (gimple_code (stmt))
2634 case GIMPLE_RESX:
2635 return true;
2637 case GIMPLE_CALL:
2638 return !gimple_call_nothrow_p (stmt);
2640 case GIMPLE_ASSIGN:
2641 case GIMPLE_COND:
2642 if (!cfun->can_throw_non_call_exceptions)
2643 return false;
2644 return stmt_could_throw_1_p (stmt);
2646 case GIMPLE_ASM:
2647 if (!cfun->can_throw_non_call_exceptions)
2648 return false;
2649 return gimple_asm_volatile_p (stmt);
2651 default:
2652 return false;
2657 /* Return true if expression T could throw an exception. */
2659 bool
2660 tree_could_throw_p (tree t)
2662 if (!flag_exceptions)
2663 return false;
2664 if (TREE_CODE (t) == MODIFY_EXPR)
2666 if (cfun->can_throw_non_call_exceptions
2667 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2668 return true;
2669 t = TREE_OPERAND (t, 1);
2672 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2673 t = TREE_OPERAND (t, 0);
2674 if (TREE_CODE (t) == CALL_EXPR)
2675 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2676 if (cfun->can_throw_non_call_exceptions)
2677 return tree_could_trap_p (t);
2678 return false;
2681 /* Return true if STMT can throw an exception that is not caught within
2682 the current function (CFUN). */
2684 bool
2685 stmt_can_throw_external (gimple stmt)
2687 int lp_nr;
2689 if (!stmt_could_throw_p (stmt))
2690 return false;
2692 lp_nr = lookup_stmt_eh_lp (stmt);
2693 return lp_nr == 0;
2696 /* Return true if STMT can throw an exception that is caught within
2697 the current function (CFUN). */
2699 bool
2700 stmt_can_throw_internal (gimple stmt)
2702 int lp_nr;
2704 if (!stmt_could_throw_p (stmt))
2705 return false;
2707 lp_nr = lookup_stmt_eh_lp (stmt);
2708 return lp_nr > 0;
2711 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2712 remove any entry it might have from the EH table. Return true if
2713 any change was made. */
2715 bool
2716 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2718 if (stmt_could_throw_p (stmt))
2719 return false;
2720 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2723 /* Likewise, but always use the current function. */
2725 bool
2726 maybe_clean_eh_stmt (gimple stmt)
2728 return maybe_clean_eh_stmt_fn (cfun, stmt);
2731 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2732 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2733 in the table if it should be in there. Return TRUE if a replacement was
2734    done that may require an EH edge purge.  */
2736 bool
2737 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2739 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2741 if (lp_nr != 0)
2743 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2745 if (new_stmt == old_stmt && new_stmt_could_throw)
2746 return false;
2748 remove_stmt_from_eh_lp (old_stmt);
2749 if (new_stmt_could_throw)
2751 add_stmt_to_eh_lp (new_stmt, lp_nr);
2752 return false;
2754 else
2755 return true;
2758 return false;
2761 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2762 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2763 operand is the return value of duplicate_eh_regions. */
2765 bool
2766 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2767 struct function *old_fun, gimple old_stmt,
2768 struct pointer_map_t *map, int default_lp_nr)
2770 int old_lp_nr, new_lp_nr;
2771 void **slot;
2773 if (!stmt_could_throw_p (new_stmt))
2774 return false;
2776 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2777 if (old_lp_nr == 0)
2779 if (default_lp_nr == 0)
2780 return false;
2781 new_lp_nr = default_lp_nr;
2783 else if (old_lp_nr > 0)
2785 eh_landing_pad old_lp, new_lp;
2787 old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr);
2788 slot = pointer_map_contains (map, old_lp);
2789 new_lp = (eh_landing_pad) *slot;
2790 new_lp_nr = new_lp->index;
2792 else
2794 eh_region old_r, new_r;
2796 old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr);
2797 slot = pointer_map_contains (map, old_r);
2798 new_r = (eh_region) *slot;
2799 new_lp_nr = -new_r->index;
2802 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2803 return true;
2806 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2807 and thus no remapping is required. */
2809 bool
2810 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2812 int lp_nr;
2814 if (!stmt_could_throw_p (new_stmt))
2815 return false;
2817 lp_nr = lookup_stmt_eh_lp (old_stmt);
2818 if (lp_nr == 0)
2819 return false;
2821 add_stmt_to_eh_lp (new_stmt, lp_nr);
2822 return true;
2825 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2826 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2827 this only handles handlers consisting of a single call, as that's the
2828 important case for C++: a destructor call for a particular object showing
2829 up in multiple handlers. */
2831 static bool
2832 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2834 gimple_stmt_iterator gsi;
2835 gimple ones, twos;
2836 unsigned int ai;
2838 gsi = gsi_start (oneh);
2839 if (!gsi_one_before_end_p (gsi))
2840 return false;
2841 ones = gsi_stmt (gsi);
2843 gsi = gsi_start (twoh);
2844 if (!gsi_one_before_end_p (gsi))
2845 return false;
2846 twos = gsi_stmt (gsi);
2848 if (!is_gimple_call (ones)
2849 || !is_gimple_call (twos)
2850 || gimple_call_lhs (ones)
2851 || gimple_call_lhs (twos)
2852 || gimple_call_chain (ones)
2853 || gimple_call_chain (twos)
2854 || !gimple_call_same_target_p (ones, twos)
2855 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2856 return false;
2858 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2859 if (!operand_equal_p (gimple_call_arg (ones, ai),
2860 gimple_call_arg (twos, ai), 0))
2861 return false;
2863 return true;
2866 /* Optimize
2867 try { A() } finally { try { ~B() } catch { ~A() } }
2868 try { ... } finally { ~A() }
2869 into
2870 try { A() } catch { ~B() }
2871 try { ~B() ... } finally { ~A() }
2873 This occurs frequently in C++, where A is a local variable and B is a
2874 temporary used in the initializer for A. */
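/* A hypothetical C++ fragment that tends to produce this shape
   (illustrative sketch only, not taken from any test case):
     A a = A (B ());   // B is a temporary used in A's initializer
   Destroying the temporary B once A is constructed, while keeping
   ~A() as the cleanup for the rest of the scope, yields the duplicated
   ~A() handlers that the transformation above merges.  */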
2876 static void
2877 optimize_double_finally (gimple one, gimple two)
2879 gimple oneh;
2880 gimple_stmt_iterator gsi;
2881 gimple_seq cleanup;
2883 cleanup = gimple_try_cleanup (one);
2884 gsi = gsi_start (cleanup);
2885 if (!gsi_one_before_end_p (gsi))
2886 return;
2888 oneh = gsi_stmt (gsi);
2889 if (gimple_code (oneh) != GIMPLE_TRY
2890 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
2891 return;
2893 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2895 gimple_seq seq = gimple_try_eval (oneh);
2897 gimple_try_set_cleanup (one, seq);
2898 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
2899 seq = copy_gimple_seq_and_replace_locals (seq);
2900 gimple_seq_add_seq (&seq, gimple_try_eval (two));
2901 gimple_try_set_eval (two, seq);
2905 /* Perform EH refactoring optimizations that are simpler to do when code
2906 flow has been lowered but EH structures haven't. */
2908 static void
2909 refactor_eh_r (gimple_seq seq)
2911 gimple_stmt_iterator gsi;
2912 gimple one, two;
2914 one = NULL;
2915 two = NULL;
2916 gsi = gsi_start (seq);
2917 while (1)
2919 one = two;
2920 if (gsi_end_p (gsi))
2921 two = NULL;
2922 else
2923 two = gsi_stmt (gsi);
2924 if (one
2925 && two
2926 && gimple_code (one) == GIMPLE_TRY
2927 && gimple_code (two) == GIMPLE_TRY
2928 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2929 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
2930 optimize_double_finally (one, two);
2931 if (one)
2932 switch (gimple_code (one))
2934 case GIMPLE_TRY:
2935 refactor_eh_r (gimple_try_eval (one));
2936 refactor_eh_r (gimple_try_cleanup (one));
2937 break;
2938 case GIMPLE_CATCH:
2939 refactor_eh_r (gimple_catch_handler (one));
2940 break;
2941 case GIMPLE_EH_FILTER:
2942 refactor_eh_r (gimple_eh_filter_failure (one));
2943 break;
2944 case GIMPLE_EH_ELSE:
2945 refactor_eh_r (gimple_eh_else_n_body (one));
2946 refactor_eh_r (gimple_eh_else_e_body (one));
2947 break;
2948 default:
2949 break;
2951 if (two)
2952 gsi_next (&gsi);
2953 else
2954 break;
2958 static unsigned
2959 refactor_eh (void)
2961 refactor_eh_r (gimple_body (current_function_decl));
2962 return 0;
2965 static bool
2966 gate_refactor_eh (void)
2968 return flag_exceptions != 0;
2971 struct gimple_opt_pass pass_refactor_eh =
2974 GIMPLE_PASS,
2975 "ehopt", /* name */
2976 gate_refactor_eh, /* gate */
2977 refactor_eh, /* execute */
2978 NULL, /* sub */
2979 NULL, /* next */
2980 0, /* static_pass_number */
2981 TV_TREE_EH, /* tv_id */
2982 PROP_gimple_lcf, /* properties_required */
2983 0, /* properties_provided */
2984 0, /* properties_destroyed */
2985 0, /* todo_flags_start */
2986 0 /* todo_flags_finish */
2990 /* At the end of gimple optimization, we can lower RESX. */
2992 static bool
2993 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
2995 int lp_nr;
2996 eh_region src_r, dst_r;
2997 gimple_stmt_iterator gsi;
2998 gimple x;
2999 tree fn, src_nr;
3000 bool ret = false;
3002 lp_nr = lookup_stmt_eh_lp (stmt);
3003 if (lp_nr != 0)
3004 dst_r = get_eh_region_from_lp_number (lp_nr);
3005 else
3006 dst_r = NULL;
3008 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
3009 gsi = gsi_last_bb (bb);
3011 if (src_r == NULL)
3013 /* We can wind up with no source region when pass_cleanup_eh shows
3014 that there are no entries into an eh region and deletes it, but
3015 then the block that contains the resx isn't removed. This can
3016 happen without optimization when the switch statement created by
3017 lower_try_finally_switch isn't simplified to remove the eh case.
3019 Resolve this by expanding the resx node to an abort. */
3021 fn = builtin_decl_implicit (BUILT_IN_TRAP);
3022 x = gimple_build_call (fn, 0);
3023 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3025 while (EDGE_COUNT (bb->succs) > 0)
3026 remove_edge (EDGE_SUCC (bb, 0));
3028 else if (dst_r)
3030 /* When we have a destination region, we resolve this by copying
3031         the exception pointer and filter values into place, and changing the edge
3032 to immediately after the landing pad. */
3033 edge e;
3035 if (lp_nr < 0)
3037 basic_block new_bb;
3038 void **slot;
3039 tree lab;
3041          /* We are resuming into a MUST_NOT_THROW region.  Expand a call to
3042 the failure decl into a new block, if needed. */
3043 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
3045 slot = pointer_map_contains (mnt_map, dst_r);
3046 if (slot == NULL)
3048 gimple_stmt_iterator gsi2;
3050 new_bb = create_empty_bb (bb);
3051 if (current_loops)
3052 add_bb_to_loop (new_bb, bb->loop_father);
3053 lab = gimple_block_label (new_bb);
3054 gsi2 = gsi_start_bb (new_bb);
3056 fn = dst_r->u.must_not_throw.failure_decl;
3057 x = gimple_build_call (fn, 0);
3058 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3059 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3061 slot = pointer_map_insert (mnt_map, dst_r);
3062 *slot = lab;
3064 else
3066 lab = (tree) *slot;
3067 new_bb = label_to_block (lab);
3070 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3071 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
3072 e->count = bb->count;
3073 e->probability = REG_BR_PROB_BASE;
3075 else
3077 edge_iterator ei;
3078 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
3080 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
3081 src_nr = build_int_cst (integer_type_node, src_r->index);
3082 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3083 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3085 /* Update the flags for the outgoing edge. */
3086 e = single_succ_edge (bb);
3087 gcc_assert (e->flags & EDGE_EH);
3088 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3090 /* If there are no more EH users of the landing pad, delete it. */
3091 FOR_EACH_EDGE (e, ei, e->dest->preds)
3092 if (e->flags & EDGE_EH)
3093 break;
3094 if (e == NULL)
3096 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3097 remove_eh_landing_pad (lp);
3101 ret = true;
3103 else
3105 tree var;
3107 /* When we don't have a destination region, this exception escapes
3108 up the call chain. We resolve this by generating a call to the
3109 _Unwind_Resume library function. */
3111 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3112 with no arguments for C++ and Java. Check for that. */
3113 if (src_r->use_cxa_end_cleanup)
3115 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3116 x = gimple_build_call (fn, 0);
3117 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3119 else
3121 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3122 src_nr = build_int_cst (integer_type_node, src_r->index);
3123 x = gimple_build_call (fn, 1, src_nr);
3124 var = create_tmp_var (ptr_type_node, NULL);
3125 var = make_ssa_name (var, x);
3126 gimple_call_set_lhs (x, var);
3127 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3129 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3130 x = gimple_build_call (fn, 1, var);
3131 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3134 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3137 gsi_remove (&gsi, true);
3139 return ret;
3142 static unsigned
3143 execute_lower_resx (void)
3145 basic_block bb;
3146 struct pointer_map_t *mnt_map;
3147 bool dominance_invalidated = false;
3148 bool any_rewritten = false;
3150 mnt_map = pointer_map_create ();
3152 FOR_EACH_BB (bb)
3154 gimple last = last_stmt (bb);
3155 if (last && is_gimple_resx (last))
3157 dominance_invalidated |= lower_resx (bb, last, mnt_map);
3158 any_rewritten = true;
3162 pointer_map_destroy (mnt_map);
3164 if (dominance_invalidated)
3166 free_dominance_info (CDI_DOMINATORS);
3167 free_dominance_info (CDI_POST_DOMINATORS);
3170 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3173 static bool
3174 gate_lower_resx (void)
3176 return flag_exceptions != 0;
3179 struct gimple_opt_pass pass_lower_resx =
3182 GIMPLE_PASS,
3183 "resx", /* name */
3184 gate_lower_resx, /* gate */
3185 execute_lower_resx, /* execute */
3186 NULL, /* sub */
3187 NULL, /* next */
3188 0, /* static_pass_number */
3189 TV_TREE_EH, /* tv_id */
3190 PROP_gimple_lcf, /* properties_required */
3191 0, /* properties_provided */
3192 0, /* properties_destroyed */
3193 0, /* todo_flags_start */
3194 TODO_verify_flow /* todo_flags_finish */
3198 /* Try to optimize var = {v} {CLOBBER} stmts followed just by
3199 external throw. */
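/* Illustrative sketch of the block shape handled here (assumed, not
   verbatim GIMPLE): a block whose exception escapes the function may
   end like
     x ={v} {CLOBBER};
     y ={v} {CLOBBER};
     resx 1;
   The clobbers only mark storage as dead; once the exception leaves
   the function they have no observable effect, so they are deleted.  */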
3201 static void
3202 optimize_clobbers (basic_block bb)
3204 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3205 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3207 gimple stmt = gsi_stmt (gsi);
3208 if (is_gimple_debug (stmt))
3209 continue;
3210 if (!gimple_clobber_p (stmt)
3211 || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3212 return;
3213 unlink_stmt_vdef (stmt);
3214 gsi_remove (&gsi, true);
3215 release_defs (stmt);
3219 /* Try to sink var = {v} {CLOBBER} stmts followed just by
3220 internal throw to successor BB. */
3222 static int
3223 sink_clobbers (basic_block bb)
3225 edge e;
3226 edge_iterator ei;
3227 gimple_stmt_iterator gsi, dgsi;
3228 basic_block succbb;
3229 bool any_clobbers = false;
3231 /* Only optimize if BB has a single EH successor and
3232 all predecessor edges are EH too. */
3233 if (!single_succ_p (bb)
3234 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3235 return 0;
3237 FOR_EACH_EDGE (e, ei, bb->preds)
3239 if ((e->flags & EDGE_EH) == 0)
3240 return 0;
3243 /* And BB contains only CLOBBER stmts before the final
3244 RESX. */
3245 gsi = gsi_last_bb (bb);
3246 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3248 gimple stmt = gsi_stmt (gsi);
3249 if (is_gimple_debug (stmt))
3250 continue;
3251 if (gimple_code (stmt) == GIMPLE_LABEL)
3252 break;
3253 if (!gimple_clobber_p (stmt)
3254 || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3255 return 0;
3256 any_clobbers = true;
3258 if (!any_clobbers)
3259 return 0;
3261 succbb = single_succ (bb);
3262 dgsi = gsi_after_labels (succbb);
3263 gsi = gsi_last_bb (bb);
3264 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3266 gimple stmt = gsi_stmt (gsi);
3267 if (is_gimple_debug (stmt))
3268 continue;
3269 if (gimple_code (stmt) == GIMPLE_LABEL)
3270 break;
3271 unlink_stmt_vdef (stmt);
3272 gsi_remove (&gsi, false);
3273 /* Trigger the operand scanner to cause renaming for virtual
3274 operands for this statement.
3275 ??? Given the simple structure of this code manually
3276 figuring out the reaching definition should not be too hard. */
3277 if (gimple_vuse (stmt))
3278 gimple_set_vuse (stmt, NULL_TREE);
3279 gsi_insert_before (&dgsi, stmt, GSI_SAME_STMT);
3282 return TODO_update_ssa_only_virtuals;
3285 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3286 we have found some duplicate labels and removed some edges. */
3288 static bool
3289 lower_eh_dispatch (basic_block src, gimple stmt)
3291 gimple_stmt_iterator gsi;
3292 int region_nr;
3293 eh_region r;
3294 tree filter, fn;
3295 gimple x;
3296 bool redirected = false;
3298 region_nr = gimple_eh_dispatch_region (stmt);
3299 r = get_eh_region_from_number (region_nr);
3301 gsi = gsi_last_bb (src);
3303 switch (r->type)
3305 case ERT_TRY:
3307 VEC (tree, heap) *labels = NULL;
3308 tree default_label = NULL;
3309 eh_catch c;
3310 edge_iterator ei;
3311 edge e;
3312 struct pointer_set_t *seen_values = pointer_set_create ();
3314 /* Collect the labels for a switch. Zero the post_landing_pad
3315            field because we'll no longer have anything keeping these labels
3316 in existence and the optimizer will be free to merge these
3317 blocks at will. */
3318 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3320 tree tp_node, flt_node, lab = c->label;
3321 bool have_label = false;
3323 c->label = NULL;
3324 tp_node = c->type_list;
3325 flt_node = c->filter_list;
3327 if (tp_node == NULL)
3329 default_label = lab;
3330 break;
3334 /* Filter out duplicate labels that arise when this handler
3335 is shadowed by an earlier one. When no labels are
3336 attached to the handler anymore, we remove
3337 the corresponding edge and then we delete unreachable
3338 blocks at the end of this pass. */
3339 if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
3341 tree t = build_case_label (TREE_VALUE (flt_node),
3342 NULL, lab);
3343 VEC_safe_push (tree, heap, labels, t);
3344 pointer_set_insert (seen_values, TREE_VALUE (flt_node));
3345 have_label = true;
3348 tp_node = TREE_CHAIN (tp_node);
3349 flt_node = TREE_CHAIN (flt_node);
3351 while (tp_node);
3352 if (! have_label)
3354 remove_edge (find_edge (src, label_to_block (lab)));
3355 redirected = true;
3359 /* Clean up the edge flags. */
3360 FOR_EACH_EDGE (e, ei, src->succs)
3362 if (e->flags & EDGE_FALLTHRU)
3364 /* If there was no catch-all, use the fallthru edge. */
3365 if (default_label == NULL)
3366 default_label = gimple_block_label (e->dest);
3367 e->flags &= ~EDGE_FALLTHRU;
3370 gcc_assert (default_label != NULL);
3372 /* Don't generate a switch if there's only a default case.
3373 This is common in the form of try { A; } catch (...) { B; }. */
3374 if (labels == NULL)
3376 e = single_succ_edge (src);
3377 e->flags |= EDGE_FALLTHRU;
3379 else
3381 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3382 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3383 region_nr));
3384 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3385 filter = make_ssa_name (filter, x);
3386 gimple_call_set_lhs (x, filter);
3387 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3389 /* Turn the default label into a default case. */
3390 default_label = build_case_label (NULL, NULL, default_label);
3391 sort_case_labels (labels);
3393 x = gimple_build_switch (filter, default_label, labels);
3394 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3396 VEC_free (tree, heap, labels);
3398 pointer_set_destroy (seen_values);
3400 break;
3402 case ERT_ALLOWED_EXCEPTIONS:
3404 edge b_e = BRANCH_EDGE (src);
3405 edge f_e = FALLTHRU_EDGE (src);
3407 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3408 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3409 region_nr));
3410 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3411 filter = make_ssa_name (filter, x);
3412 gimple_call_set_lhs (x, filter);
3413 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3415 r->u.allowed.label = NULL;
3416 x = gimple_build_cond (EQ_EXPR, filter,
3417 build_int_cst (TREE_TYPE (filter),
3418 r->u.allowed.filter),
3419 NULL_TREE, NULL_TREE);
3420 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3422 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3423 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3425 break;
3427 default:
3428 gcc_unreachable ();
3431 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3432 gsi_remove (&gsi, true);
3433 return redirected;
3436 static unsigned
3437 execute_lower_eh_dispatch (void)
3439 basic_block bb;
3440 int flags = 0;
3441 bool redirected = false;
3443 assign_filter_values ();
3445 FOR_EACH_BB (bb)
3447 gimple last = last_stmt (bb);
3448 if (last == NULL)
3449 continue;
3450 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
3452 redirected |= lower_eh_dispatch (bb, last);
3453 flags |= TODO_update_ssa_only_virtuals;
3455 else if (gimple_code (last) == GIMPLE_RESX)
3457 if (stmt_can_throw_external (last))
3458 optimize_clobbers (bb);
3459 else
3460 flags |= sink_clobbers (bb);
3464 if (redirected)
3465 delete_unreachable_blocks ();
3466 return flags;
3469 static bool
3470 gate_lower_eh_dispatch (void)
3472 return cfun->eh->region_tree != NULL;
3475 struct gimple_opt_pass pass_lower_eh_dispatch =
3478 GIMPLE_PASS,
3479 "ehdisp", /* name */
3480 gate_lower_eh_dispatch, /* gate */
3481 execute_lower_eh_dispatch, /* execute */
3482 NULL, /* sub */
3483 NULL, /* next */
3484 0, /* static_pass_number */
3485 TV_TREE_EH, /* tv_id */
3486 PROP_gimple_lcf, /* properties_required */
3487 0, /* properties_provided */
3488 0, /* properties_destroyed */
3489 0, /* todo_flags_start */
3490 TODO_verify_flow /* todo_flags_finish */
3494 /* Walk statements, see what regions are really referenced and remove
3495 those that are unused. */
3497 static void
3498 remove_unreachable_handlers (void)
3500 sbitmap r_reachable, lp_reachable;
3501 eh_region region;
3502 eh_landing_pad lp;
3503 basic_block bb;
3504 int lp_nr, r_nr;
3506 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3507 lp_reachable
3508 = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
3509 sbitmap_zero (r_reachable);
3510 sbitmap_zero (lp_reachable);
3512 FOR_EACH_BB (bb)
3514 gimple_stmt_iterator gsi;
3516 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3518 gimple stmt = gsi_stmt (gsi);
3519 lp_nr = lookup_stmt_eh_lp (stmt);
3521 /* Negative LP numbers are MUST_NOT_THROW regions which
3522 are not considered BB enders. */
3523 if (lp_nr < 0)
3524 SET_BIT (r_reachable, -lp_nr);
3526          /* Positive LP numbers are real landing pads, and are BB enders.  */
3527 else if (lp_nr > 0)
3529 gcc_assert (gsi_one_before_end_p (gsi));
3530 region = get_eh_region_from_lp_number (lp_nr);
3531 SET_BIT (r_reachable, region->index);
3532 SET_BIT (lp_reachable, lp_nr);
3535 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3536 switch (gimple_code (stmt))
3538 case GIMPLE_RESX:
3539 SET_BIT (r_reachable, gimple_resx_region (stmt));
3540 break;
3541 case GIMPLE_EH_DISPATCH:
3542 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3543 break;
3544 default:
3545 break;
3550 if (dump_file)
3552 fprintf (dump_file, "Before removal of unreachable regions:\n");
3553 dump_eh_tree (dump_file, cfun);
3554 fprintf (dump_file, "Reachable regions: ");
3555 dump_sbitmap_file (dump_file, r_reachable);
3556 fprintf (dump_file, "Reachable landing pads: ");
3557 dump_sbitmap_file (dump_file, lp_reachable);
3560 for (r_nr = 1;
3561 VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
3562 if (region && !TEST_BIT (r_reachable, r_nr))
3564 if (dump_file)
3565 fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
3566 remove_eh_handler (region);
3569 for (lp_nr = 1;
3570 VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
3571 if (lp && !TEST_BIT (lp_reachable, lp_nr))
3573 if (dump_file)
3574 fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
3575 remove_eh_landing_pad (lp);
3578 if (dump_file)
3580 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3581 dump_eh_tree (dump_file, cfun);
3582 fprintf (dump_file, "\n\n");
3585 sbitmap_free (r_reachable);
3586 sbitmap_free (lp_reachable);
3588 #ifdef ENABLE_CHECKING
3589 verify_eh_tree (cfun);
3590 #endif
3593 /* Remove unreachable handlers if any landing pads have been removed after
3594 last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
3596 void
3597 maybe_remove_unreachable_handlers (void)
3599 eh_landing_pad lp;
3600 int i;
3602 if (cfun->eh == NULL)
3603 return;
3605 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3606 if (lp && lp->post_landing_pad)
3608 if (label_to_block (lp->post_landing_pad) == NULL)
3610 remove_unreachable_handlers ();
3611 return;
3616 /* Remove regions that do not have landing pads. This assumes
3617 that remove_unreachable_handlers has already been run, and
3618 that we've just manipulated the landing pads since then. */
3620 static void
3621 remove_unreachable_handlers_no_lp (void)
3623 eh_region r;
3624 int i;
3625 sbitmap r_reachable;
3626 basic_block bb;
3628 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3629 sbitmap_zero (r_reachable);
3631 FOR_EACH_BB (bb)
3633 gimple stmt = last_stmt (bb);
3634 if (stmt)
3635 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3636 switch (gimple_code (stmt))
3638 case GIMPLE_RESX:
3639 SET_BIT (r_reachable, gimple_resx_region (stmt));
3640 break;
3641 case GIMPLE_EH_DISPATCH:
3642 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3643 break;
3644 default:
3645 break;
3649 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
3650 if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW
3651 && !TEST_BIT (r_reachable, i))
3653 if (dump_file)
3654 fprintf (dump_file, "Removing unreachable region %d\n", i);
3655 remove_eh_handler (r);
3658 sbitmap_free (r_reachable);
3661 /* Undo critical edge splitting on an EH landing pad. Earlier, we
3662    optimistically split all sorts of edges, including EH edges.  The
3663 optimization passes in between may not have needed them; if not,
3664 we should undo the split.
3666 Recognize this case by having one EH edge incoming to the BB and
3667 one normal edge outgoing; BB should be empty apart from the
3668 post_landing_pad label.
3670 Note that this is slightly different from the empty handler case
3671 handled by cleanup_empty_eh, in that the actual handler may yet
3672 have actual code but the landing pad has been separated from the
3673 handler. As such, cleanup_empty_eh relies on this transformation
3674 having been done first. */
3676 static bool
3677 unsplit_eh (eh_landing_pad lp)
3679 basic_block bb = label_to_block (lp->post_landing_pad);
3680 gimple_stmt_iterator gsi;
3681 edge e_in, e_out;
3683 /* Quickly check the edge counts on BB for singularity. */
3684 if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1)
3685 return false;
3686 e_in = EDGE_PRED (bb, 0);
3687 e_out = EDGE_SUCC (bb, 0);
3689 /* Input edge must be EH and output edge must be normal. */
3690 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3691 return false;
3693 /* The block must be empty except for the labels and debug insns. */
3694 gsi = gsi_after_labels (bb);
3695 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3696 gsi_next_nondebug (&gsi);
3697 if (!gsi_end_p (gsi))
3698 return false;
3700 /* The destination block must not already have a landing pad
3701 for a different region. */
3702 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3704 gimple stmt = gsi_stmt (gsi);
3705 tree lab;
3706 int lp_nr;
3708 if (gimple_code (stmt) != GIMPLE_LABEL)
3709 break;
3710 lab = gimple_label_label (stmt);
3711 lp_nr = EH_LANDING_PAD_NR (lab);
3712 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3713 return false;
3716 /* The new destination block must not already be a destination of
3717 the source block, lest we merge fallthru and eh edges and get
3718      all sorts of confusion.  */
3719 if (find_edge (e_in->src, e_out->dest))
3720 return false;
3722 /* ??? We can get degenerate phis due to cfg cleanups. I would have
3723 thought this should have been cleaned up by a phicprop pass, but
3724 that doesn't appear to handle virtuals. Propagate by hand. */
3725 if (!gimple_seq_empty_p (phi_nodes (bb)))
3727 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
3729 gimple use_stmt, phi = gsi_stmt (gsi);
3730 tree lhs = gimple_phi_result (phi);
3731 tree rhs = gimple_phi_arg_def (phi, 0);
3732 use_operand_p use_p;
3733 imm_use_iterator iter;
3735 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3737 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3738 SET_USE (use_p, rhs);
3741 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3742 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
3744 remove_phi_node (&gsi, true);
3748 if (dump_file && (dump_flags & TDF_DETAILS))
3749 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
3750 lp->index, e_out->dest->index);
3752 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
3753 a successor edge, humor it. But do the real CFG change with the
3754 predecessor of E_OUT in order to preserve the ordering of arguments
3755 to the PHI nodes in E_OUT->DEST. */
3756 redirect_eh_edge_1 (e_in, e_out->dest, false);
3757 redirect_edge_pred (e_out, e_in->src);
3758 e_out->flags = e_in->flags;
3759 e_out->probability = e_in->probability;
3760 e_out->count = e_in->count;
3761 remove_edge (e_in);
3763 return true;
3766 /* Examine each landing pad block and see if it matches unsplit_eh. */
3768 static bool
3769 unsplit_all_eh (void)
3771 bool changed = false;
3772 eh_landing_pad lp;
3773 int i;
3775 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3776 if (lp)
3777 changed |= unsplit_eh (lp);
3779 return changed;
3782 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
3783 to OLD_BB to NEW_BB; return true on success, false on failure.
3785 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
3786 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
3787 Virtual PHIs may be deleted and marked for renaming. */
3789 static bool
3790 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
3791 edge old_bb_out, bool change_region)
3793 gimple_stmt_iterator ngsi, ogsi;
3794 edge_iterator ei;
3795 edge e;
3796 bitmap rename_virts;
3797 bitmap ophi_handled;
3799 /* The destination block must not be a regular successor for any
3800 of the preds of the landing pad. Thus, avoid turning
3801          <..>
3802           |  \ EH
3803           |  <..>
3804           |  /
3805          <..>
3806 into
3807 <..>
3808 | | EH
3809 <..>
3810 which CFG verification would choke on. See PR45172 and PR51089. */
3811 FOR_EACH_EDGE (e, ei, old_bb->preds)
3812 if (find_edge (e->src, new_bb))
3813 return false;
3815 FOR_EACH_EDGE (e, ei, old_bb->preds)
3816 redirect_edge_var_map_clear (e);
3818 ophi_handled = BITMAP_ALLOC (NULL);
3819 rename_virts = BITMAP_ALLOC (NULL);
3821 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
3822 for the edges we're going to move. */
3823 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
3825 gimple ophi, nphi = gsi_stmt (ngsi);
3826 tree nresult, nop;
3828 nresult = gimple_phi_result (nphi);
3829 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
3831 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
3832 the source ssa_name. */
3833 ophi = NULL;
3834 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3836 ophi = gsi_stmt (ogsi);
3837 if (gimple_phi_result (ophi) == nop)
3838 break;
3839 ophi = NULL;
3842 /* If we did find the corresponding PHI, copy those inputs. */
3843 if (ophi)
3845 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
3846 if (!has_single_use (nop))
3848 imm_use_iterator imm_iter;
3849 use_operand_p use_p;
3851 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
3853 if (!gimple_debug_bind_p (USE_STMT (use_p))
3854 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
3855 || gimple_bb (USE_STMT (use_p)) != new_bb))
3856 goto fail;
3859 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
3860 FOR_EACH_EDGE (e, ei, old_bb->preds)
3862 location_t oloc;
3863 tree oop;
3865 if ((e->flags & EDGE_EH) == 0)
3866 continue;
3867 oop = gimple_phi_arg_def (ophi, e->dest_idx);
3868 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
3869 redirect_edge_var_map_add (e, nresult, oop, oloc);
3872 /* If we didn't find the PHI, but it's a VOP, remember to rename
3873 it later, assuming all other tests succeed. */
3874 else if (virtual_operand_p (nresult))
3875 bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult));
3876 /* If we didn't find the PHI, and it's a real variable, we know
3877 from the fact that OLD_BB is tree_empty_eh_handler_p that the
3878 variable is unchanged from input to the block and we can simply
3879 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
3880 else
3882 location_t nloc
3883 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
3884 FOR_EACH_EDGE (e, ei, old_bb->preds)
3885 redirect_edge_var_map_add (e, nresult, nop, nloc);
3889 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
3890 we don't know what values from the other edges into NEW_BB to use. */
3891 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3893 gimple ophi = gsi_stmt (ogsi);
3894 tree oresult = gimple_phi_result (ophi);
3895 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
3896 goto fail;
3899 /* At this point we know that the merge will succeed. Remove the PHI
3900 nodes for the virtuals that we want to rename. */
3901 if (!bitmap_empty_p (rename_virts))
3903 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); )
3905 gimple nphi = gsi_stmt (ngsi);
3906 tree nresult = gimple_phi_result (nphi);
3907 if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult)))
3909 mark_virtual_phi_result_for_renaming (nphi);
3910 remove_phi_node (&ngsi, true);
3912 else
3913 gsi_next (&ngsi);
3917 /* Finally, move the edges and update the PHIs. */
3918 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
3919 if (e->flags & EDGE_EH)
3921        /* ??? CFG manipulation routines do not try to update loop
3922 form on edge redirection. Do so manually here for now. */
3923        /* If we redirect a loop entry or latch edge, that will either create
3924           a multiple-entry loop or rotate the loop.  If the loops merge,
3925           we may end up with a loop that has multiple latches.
3926           None of this is easily fixed, so cancel the affected loop
3927           and mark the other loop as possibly having multiple latches.  */
3928 if (current_loops
3929 && e->dest == e->dest->loop_father->header)
3931 e->dest->loop_father->header = NULL;
3932 e->dest->loop_father->latch = NULL;
3933 new_bb->loop_father->latch = NULL;
3934 loops_state_set (LOOPS_NEED_FIXUP|LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
3936 redirect_eh_edge_1 (e, new_bb, change_region);
3937 redirect_edge_succ (e, new_bb);
3938 flush_pending_stmts (e);
3940 else
3941 ei_next (&ei);
3943 BITMAP_FREE (ophi_handled);
3944 BITMAP_FREE (rename_virts);
3945 return true;
3947 fail:
3948 FOR_EACH_EDGE (e, ei, old_bb->preds)
3949 redirect_edge_var_map_clear (e);
3950 BITMAP_FREE (ophi_handled);
3951 BITMAP_FREE (rename_virts);
3952 return false;
3955 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
3956 old region to NEW_REGION at BB. */
3958 static void
3959 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
3960 eh_landing_pad lp, eh_region new_region)
3962 gimple_stmt_iterator gsi;
3963 eh_landing_pad *pp;
3965 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
3966 continue;
3967 *pp = lp->next_lp;
3969 lp->region = new_region;
3970 lp->next_lp = new_region->landing_pads;
3971 new_region->landing_pads = lp;
3973 /* Delete the RESX that was matched within the empty handler block. */
3974 gsi = gsi_last_bb (bb);
3975 unlink_stmt_vdef (gsi_stmt (gsi));
3976 gsi_remove (&gsi, true);
3978 /* Clean up E_OUT for the fallthru. */
3979 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3980 e_out->probability = REG_BR_PROB_BASE;
3983 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
3984 unsplitting than unsplit_eh was prepared to handle, e.g. when
3985 multiple incoming edges and phis are involved. */
3987 static bool
3988 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
3990 gimple_stmt_iterator gsi;
3991 tree lab;
3993 /* We really ought not have totally lost everything following
3994 a landing pad label. Given that BB is empty, there had better
3995 be a successor. */
3996 gcc_assert (e_out != NULL);
3998 /* The destination block must not already have a landing pad
3999 for a different region. */
4000 lab = NULL;
4001 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4003 gimple stmt = gsi_stmt (gsi);
4004 int lp_nr;
4006 if (gimple_code (stmt) != GIMPLE_LABEL)
4007 break;
4008 lab = gimple_label_label (stmt);
4009 lp_nr = EH_LANDING_PAD_NR (lab);
4010 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4011 return false;
4014 /* Attempt to move the PHIs into the successor block. */
4015 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
4017 if (dump_file && (dump_flags & TDF_DETAILS))
4018 fprintf (dump_file,
4019 "Unsplit EH landing pad %d to block %i "
4020 "(via cleanup_empty_eh).\n",
4021 lp->index, e_out->dest->index);
4022 return true;
4025 return false;
4028 /* Return true if edge E_FIRST is part of an empty infinite loop
4029 or leads to such a loop through a series of single successor
4030 empty bbs. */
4032 static bool
4033 infinite_empty_loop_p (edge e_first)
4035 bool inf_loop = false;
4036 edge e;
4038 if (e_first->dest == e_first->src)
4039 return true;
4041 e_first->src->aux = (void *) 1;
4042 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4044 gimple_stmt_iterator gsi;
4045 if (e->dest->aux)
4047 inf_loop = true;
4048 break;
4050 e->dest->aux = (void *) 1;
4051 gsi = gsi_after_labels (e->dest);
4052 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4053 gsi_next_nondebug (&gsi);
4054 if (!gsi_end_p (gsi))
4055 break;
4057 e_first->src->aux = NULL;
4058 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4059 e->dest->aux = NULL;
4061 return inf_loop;
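/* A standalone sketch of the marking walk used by infinite_empty_loop_p
   above: visited nodes are flagged (the real code uses bb->aux), a revisit
   means the chain of single-successor nodes has closed into a cycle, and
   the flags are cleared again before returning.  The `node' type and the
   function name below are illustrative only, not part of this file:

     struct node { struct node *succ; int mark; };

     static int
     chain_reaches_cycle (struct node *start)
     {
       struct node *n;
       int found = 0;

       for (n = start; n && n->succ; n = n->succ)
         {
           if (n->mark)
             {
               found = 1;
               break;
             }
           n->mark = 1;
         }
       for (n = start; n && n->mark; n = n->succ)
         n->mark = 0;
       return found;
     }

   The real function additionally requires the intermediate blocks to be
   empty (labels and debug statements aside) and stops as soon as a block
   has real statements or more than one successor.  */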
4064 /* Examine the block associated with LP to determine if it's an empty
4065 handler for its EH region. If so, attempt to redirect EH edges to
4066 an outer region. Return true if the CFG was updated in any way. This
4067 is similar to jump forwarding, just across EH edges. */
4069 static bool
4070 cleanup_empty_eh (eh_landing_pad lp)
4072 basic_block bb = label_to_block (lp->post_landing_pad);
4073 gimple_stmt_iterator gsi;
4074 gimple resx;
4075 eh_region new_region;
4076 edge_iterator ei;
4077 edge e, e_out;
4078 bool has_non_eh_pred;
4079 bool ret = false;
4080 int new_lp_nr;
4082 /* There can be zero or one edge out of BB. This is the quickest test. */
4083 switch (EDGE_COUNT (bb->succs))
4085 case 0:
4086 e_out = NULL;
4087 break;
4088 case 1:
4089 e_out = EDGE_SUCC (bb, 0);
4090 break;
4091 default:
4092 return false;
4095 resx = last_stmt (bb);
4096 if (resx && is_gimple_resx (resx))
4098 if (stmt_can_throw_external (resx))
4099 optimize_clobbers (bb);
4100 else if (sink_clobbers (bb))
4101 ret = true;
4104 gsi = gsi_after_labels (bb);
4106 /* Make sure to skip debug statements. */
4107 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4108 gsi_next_nondebug (&gsi);
4110 /* If the block is totally empty, look for more unsplitting cases. */
4111 if (gsi_end_p (gsi))
4113 /* For the degenerate case of an infinite loop, bail out. */
4114 if (infinite_empty_loop_p (e_out))
4115 return ret;
4117 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
4120 /* The block should consist only of a single RESX statement, modulo a
4121 preceding call to __builtin_stack_restore if there is no outgoing
4122 edge, since the call can be eliminated in this case. */
4123 resx = gsi_stmt (gsi);
4124 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4126 gsi_next (&gsi);
4127 resx = gsi_stmt (gsi);
4129 if (!is_gimple_resx (resx))
4130 return ret;
4131 gcc_assert (gsi_one_before_end_p (gsi));
4133 /* Determine if there are non-EH edges, or resx edges into the handler. */
4134 has_non_eh_pred = false;
4135 FOR_EACH_EDGE (e, ei, bb->preds)
4136 if (!(e->flags & EDGE_EH))
4137 has_non_eh_pred = true;
4139 /* Find the handler that is outer to the empty handler by looking at
4140 where the RESX instruction was vectored. */
4141 new_lp_nr = lookup_stmt_eh_lp (resx);
4142 new_region = get_eh_region_from_lp_number (new_lp_nr);
4144 /* If there's no destination region within the current function,
4145 redirection is trivial via removing the throwing statements from
4146 the EH region, removing the EH edges, and allowing the block
4147 to go unreachable. */
4148 if (new_region == NULL)
4150 gcc_assert (e_out == NULL);
4151 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4152 if (e->flags & EDGE_EH)
4154 gimple stmt = last_stmt (e->src);
4155 remove_stmt_from_eh_lp (stmt);
4156 remove_edge (e);
4158 else
4159 ei_next (&ei);
4160 goto succeed;
4163 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4164 to handle the abort and allow the blocks to go unreachable. */
4165 if (new_region->type == ERT_MUST_NOT_THROW)
4167 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4168 if (e->flags & EDGE_EH)
4170 gimple stmt = last_stmt (e->src);
4171 remove_stmt_from_eh_lp (stmt);
4172 add_stmt_to_eh_lp (stmt, new_lp_nr);
4173 remove_edge (e);
4175 else
4176 ei_next (&ei);
4177 goto succeed;
4180 /* Try to redirect the EH edges and merge the PHIs into the destination
4181 landing pad block. If the merge succeeds, we'll already have redirected
4182 all the EH edges. The handler itself will go unreachable if there were
4183 no normal edges. */
4184 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
4185 goto succeed;
4187 /* Finally, if all input edges are EH edges, then we can (potentially)
4188 reduce the number of transfers from the runtime by moving the landing
4189 pad from the original region to the new region. This is a win when
4190 we remove the last CLEANUP region along a particular exception
4191 propagation path. Since nothing changes except for the region with
4192 which the landing pad is associated, the PHI nodes do not need to be
4193 adjusted at all. */
4194 if (!has_non_eh_pred)
4196 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4197 if (dump_file && (dump_flags & TDF_DETAILS))
4198 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4199 lp->index, new_region->index);
4201 /* ??? The CFG didn't change, but we may have rendered the
4202 old EH region unreachable. Trigger a cleanup there. */
4203 return true;
4206 return ret;
4208 succeed:
4209 if (dump_file && (dump_flags & TDF_DETAILS))
4210 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4211 remove_eh_landing_pad (lp);
4212 return true;
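/* An illustration of the transformation above, schematic rather than an
   actual GIMPLE dump.  An inner cleanup region whose post-landing-pad
   block contains nothing but a re-raise,

       <LP 1>:
         resx 1;     rethrows into the outer region

   is effectively forwarded: depending on the case handled above, the
   statements that threw to it are redirected to the outer region's
   landing pad, re-associated with a MUST_NOT_THROW region, or removed
   from the EH tables entirely when nothing in this function can catch
   the exception.  The empty handler block then goes unreachable, which
   is why the pass later calls delete_unreachable_blocks.  */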
4215 /* Do a post-order traversal of the EH region tree. Examine each
4216 post_landing_pad block and see if we can eliminate it as empty. */
4218 static bool
4219 cleanup_all_empty_eh (void)
4221 bool changed = false;
4222 eh_landing_pad lp;
4223 int i;
4225 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
4226 if (lp)
4227 changed |= cleanup_empty_eh (lp);
4229 return changed;
4232 /* Perform cleanups and lowering of exception handling:
4233 1) cleanup regions with handlers doing nothing are optimized out
4234 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
4235 3) Info about regions containing instructions, and about regions
4236 reachable via local EH edges, is collected
4237 4) The EH tree is pruned for regions that are no longer necessary.
4239 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4240 Unify those that have the same failure decl and locus.
4243 static unsigned int
4244 execute_cleanup_eh_1 (void)
4246 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4247 looking up unreachable landing pads. */
4248 remove_unreachable_handlers ();
4250 /* Watch out for the region tree vanishing because all of its regions were unreachable. */
4251 if (cfun->eh->region_tree && optimize)
4253 bool changed = false;
4255 changed |= unsplit_all_eh ();
4256 changed |= cleanup_all_empty_eh ();
4258 if (changed)
4260 free_dominance_info (CDI_DOMINATORS);
4261 free_dominance_info (CDI_POST_DOMINATORS);
4263 /* We delayed all basic block deletion, as we may have performed
4264 cleanups on EH edges while non-EH edges were still present. */
4265 delete_unreachable_blocks ();
4267 /* We manipulated the landing pads. Remove any region that no
4268 longer has a landing pad. */
4269 remove_unreachable_handlers_no_lp ();
4271 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4275 return 0;
4278 static unsigned int
4279 execute_cleanup_eh (void)
4281 int ret = execute_cleanup_eh_1 ();
4283 /* If the function no longer needs an EH personality routine,
4284 clear it. This exposes cross-language inlining opportunities
4285 and avoids references to a never-defined personality routine. */
4286 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4287 && function_needs_eh_personality (cfun) != eh_personality_lang)
4288 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4290 return ret;
4293 static bool
4294 gate_cleanup_eh (void)
4296 return cfun->eh != NULL && cfun->eh->region_tree != NULL;
4299 struct gimple_opt_pass pass_cleanup_eh = {
4301 GIMPLE_PASS,
4302 "ehcleanup", /* name */
4303 gate_cleanup_eh, /* gate */
4304 execute_cleanup_eh, /* execute */
4305 NULL, /* sub */
4306 NULL, /* next */
4307 0, /* static_pass_number */
4308 TV_TREE_EH, /* tv_id */
4309 PROP_gimple_lcf, /* properties_required */
4310 0, /* properties_provided */
4311 0, /* properties_destroyed */
4312 0, /* todo_flags_start */
4313 0 /* todo_flags_finish */
4317 /* Verify that BB, which contains STMT as its last statement, has
4318 precisely the edge that make_eh_edges would create. */
4320 DEBUG_FUNCTION bool
4321 verify_eh_edges (gimple stmt)
4323 basic_block bb = gimple_bb (stmt);
4324 eh_landing_pad lp = NULL;
4325 int lp_nr;
4326 edge_iterator ei;
4327 edge e, eh_edge;
4329 lp_nr = lookup_stmt_eh_lp (stmt);
4330 if (lp_nr > 0)
4331 lp = get_eh_landing_pad_from_number (lp_nr);
4333 eh_edge = NULL;
4334 FOR_EACH_EDGE (e, ei, bb->succs)
4336 if (e->flags & EDGE_EH)
4338 if (eh_edge)
4340 error ("BB %i has multiple EH edges", bb->index);
4341 return true;
4343 else
4344 eh_edge = e;
4348 if (lp == NULL)
4350 if (eh_edge)
4352 error ("BB %i can not throw but has an EH edge", bb->index);
4353 return true;
4355 return false;
4358 if (!stmt_could_throw_p (stmt))
4360 error ("BB %i last statement has incorrectly set lp", bb->index);
4361 return true;
4364 if (eh_edge == NULL)
4366 error ("BB %i is missing an EH edge", bb->index);
4367 return true;
4370 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4372 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4373 return true;
4376 return false;
4379 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
4381 DEBUG_FUNCTION bool
4382 verify_eh_dispatch_edge (gimple stmt)
4384 eh_region r;
4385 eh_catch c;
4386 basic_block src, dst;
4387 bool want_fallthru = true;
4388 edge_iterator ei;
4389 edge e, fall_edge;
4391 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4392 src = gimple_bb (stmt);
4394 FOR_EACH_EDGE (e, ei, src->succs)
4395 gcc_assert (e->aux == NULL);
4397 switch (r->type)
4399 case ERT_TRY:
4400 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4402 dst = label_to_block (c->label);
4403 e = find_edge (src, dst);
4404 if (e == NULL)
4406 error ("BB %i is missing an edge", src->index);
4407 return true;
4409 e->aux = (void *)e;
4411 /* A catch-all handler doesn't have a fallthru. */
4412 if (c->type_list == NULL)
4414 want_fallthru = false;
4415 break;
4418 break;
4420 case ERT_ALLOWED_EXCEPTIONS:
4421 dst = label_to_block (r->u.allowed.label);
4422 e = find_edge (src, dst);
4423 if (e == NULL)
4425 error ("BB %i is missing an edge", src->index);
4426 return true;
4428 e->aux = (void *)e;
4429 break;
4431 default:
4432 gcc_unreachable ();
4435 fall_edge = NULL;
4436 FOR_EACH_EDGE (e, ei, src->succs)
4438 if (e->flags & EDGE_FALLTHRU)
4440 if (fall_edge != NULL)
4442 error ("BB %i too many fallthru edges", src->index);
4443 return true;
4445 fall_edge = e;
4447 else if (e->aux)
4448 e->aux = NULL;
4449 else
4451 error ("BB %i has incorrect edge", src->index);
4452 return true;
4455 if ((fall_edge != NULL) ^ want_fallthru)
4457 error ("BB %i has incorrect fallthru edge", src->index);
4458 return true;
4461 return false;
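/* A standalone sketch of the mark-and-sweep check performed above: every
   successor edge the EH region says must exist is first marked (the real
   code uses e->aux), then all successors are swept and anything that is
   neither marked nor the one permitted fallthru edge is an error.  The
   `toy_edge' type and `check_edges' below are illustrative only, not part
   of this file:

     struct toy_edge { int expected; int fallthru; int mark; };

     static int
     check_edges (struct toy_edge *e, int n, int want_fallthru)
     {
       int i, fallthrus = 0;

       for (i = 0; i < n; ++i)
         if (e[i].expected)
           e[i].mark = 1;

       for (i = 0; i < n; ++i)
         {
           if (e[i].fallthru)
             fallthrus++;
           else if (e[i].mark)
             e[i].mark = 0;
           else
             return 1;
         }
       return fallthrus != (want_fallthru ? 1 : 0);
     }

   check_edges flags the sweep-phase errors that verify_eh_dispatch_edge
   reports, namely an unexpected successor or the wrong number of fallthru
   edges; the real function also errors out earlier, during marking, when
   an expected destination edge is missing altogether.  */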