[official-gcc.git] / gcc / tree-eh.c
1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "except.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
36 #include "timevar.h"
37 #include "langhooks.h"
38 #include "ggc.h"
39 #include "toplev.h"
40 #include "gimple.h"
42 /* In some instances a tree and a gimple need to be stored in the same table,
43 i.e. in hash tables. This is a structure to do this. */
44 typedef union {tree *tp; tree t; gimple g;} treemple;
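/* For illustration (a schematic sketch; some_label_decl and
   some_gimple_try_stmt below are placeholder names): the finally_tree
   hash table defined further down can be keyed either by a LABEL_DECL
   (a tree) or by a GIMPLE_TRY statement (a gimple), simply by filling
   a different member of this union:

       treemple temp;

       temp.t = some_label_decl;
       record_in_finally_tree (temp, region);

       temp.g = some_gimple_try_stmt;
       record_in_finally_tree (temp, region);

   which is exactly what collect_finally_tree does for GIMPLE_LABEL and
   GIMPLE_TRY statements.  */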
46 /* Nonzero if we are using EH to handle cleanups. */
47 static int using_eh_for_cleanups_p = 0;
49 void
50 using_eh_for_cleanups (void)
52 using_eh_for_cleanups_p = 1;
55 /* Misc functions used in this file. */
57 /* Compare and hash for any structure which begins with a canonical
58 pointer. Assumes all pointers are interchangeable, which is sort
59 of already assumed by gcc elsewhere IIRC. */
61 static int
62 struct_ptr_eq (const void *a, const void *b)
64 const void * const * x = (const void * const *) a;
65 const void * const * y = (const void * const *) b;
66 return *x == *y;
69 static hashval_t
70 struct_ptr_hash (const void *a)
72 const void * const * x = (const void * const *) a;
73 return (size_t)*x >> 4;
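/* For illustration (schematic): the two helpers above are intended to be
   passed together to htab_create, as lower_eh_constructs does below for
   the finally tree:

       finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

   The shift by 4 in struct_ptr_hash simply drops low-order address bits
   that are usually zero due to allocation alignment.  */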
77 /* Remember and lookup EH region data for arbitrary statements.
78 Really this means any statement that could_throw_p. We could
79 stuff this information into the stmt_ann data structure, but:
81 (1) We absolutely rely on this information being kept until
82 we get to rtl. Once we're done with lowering here, if we lose
83 the information there's no way to recover it!
85 (2) There are many more statements that *cannot* throw as
86 compared to those that can. We should be saving some amount
87 of space by only allocating memory for those that can throw. */
89 static void
90 record_stmt_eh_region (struct eh_region_d *region, gimple t)
92 if (!region)
93 return;
95 add_stmt_to_eh_region (t, get_eh_region_number (region));
99 /* Add statement T in function IFUN to EH region NUM. */
101 void
102 add_stmt_to_eh_region_fn (struct function *ifun, gimple t, int num)
104 struct throw_stmt_node *n;
105 void **slot;
107 gcc_assert (num >= 0);
108 gcc_assert (gimple_code (t) != GIMPLE_RESX);
110 n = GGC_NEW (struct throw_stmt_node);
111 n->stmt = t;
112 n->region_nr = num;
114 if (!get_eh_throw_stmt_table (ifun))
115 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
116 struct_ptr_eq,
117 ggc_free));
119 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
120 gcc_assert (!*slot);
121 *slot = n;
125 /* Add statement T in the current function (cfun) to EH region number
126 NUM. */
128 void
129 add_stmt_to_eh_region (gimple t, int num)
131 add_stmt_to_eh_region_fn (cfun, t, num);
135 /* Remove statement T in function IFUN from the EH region holding it. */
137 bool
138 remove_stmt_from_eh_region_fn (struct function *ifun, gimple t)
140 struct throw_stmt_node dummy;
141 void **slot;
143 if (!get_eh_throw_stmt_table (ifun))
144 return false;
146 dummy.stmt = t;
147 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
148 NO_INSERT);
149 if (slot)
151 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
152 return true;
154 else
155 return false;
159 /* Remove statement T in the current function (cfun) from the EH
160 region holding it. */
162 bool
163 remove_stmt_from_eh_region (gimple t)
165 return remove_stmt_from_eh_region_fn (cfun, t);
168 /* Determine if statement T is inside an EH region in function IFUN.
169 Return the EH region number if found, return -2 if IFUN does not
170 have an EH table and -1 if T could not be found in IFUN's EH region
171 table. */
174 lookup_stmt_eh_region_fn (struct function *ifun, gimple t)
176 struct throw_stmt_node *p, n;
178 if (!get_eh_throw_stmt_table (ifun))
179 return -2;
181 n.stmt = t;
182 p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun), &n);
183 return (p ? p->region_nr : -1);
187 /* Determine if statement T is inside an EH region in the current
188 function (cfun). Return the EH region number if found, return -2
189 if cfun does not have an EH table and -1 if T could not be found in
190 cfun's EH region table. */
193 lookup_stmt_eh_region (gimple t)
195 /* We can get called from initialized data when -fnon-call-exceptions
196 is on; prevent crash. */
197 if (!cfun)
198 return -1;
200 return lookup_stmt_eh_region_fn (cfun, t);
204 /* Determine if expression T is inside an EH region in the current
205 function (cfun). Return the EH region number if found, return -2
206 if IFUN does not have an EH table and -1 if T could not be found in
207 IFUN's EH region table. */
210 lookup_expr_eh_region (tree t)
212 /* We can get called from initialized data when -fnon-call-exceptions
213 is on; prevent crash. */
214 if (!cfun)
215 return -1;
217 if (!get_eh_throw_stmt_table (cfun))
218 return -2;
220 if (t && EXPR_P (t))
222 tree_ann_common_t ann = tree_common_ann (t);
223 if (ann)
224 return (int) ann->rn;
227 return -1;
231 /* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
232 nodes and LABEL_DECL nodes. We will use this during the second phase to
233 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
235 struct finally_tree_node
237 /* When storing a GIMPLE_TRY, we have to record a gimple. However
238 when deciding whether a GOTO to a certain LABEL_DECL (which is a
239 tree) leaves the TRY block, it's necessary to record a tree in
240 this field. Thus a treemple is used. */
241 treemple child;
242 gimple parent;
245 /* Note that this table is *not* marked GTY. It is short-lived. */
246 static htab_t finally_tree;
248 static void
249 record_in_finally_tree (treemple child, gimple parent)
251 struct finally_tree_node *n;
252 void **slot;
254 n = XNEW (struct finally_tree_node);
255 n->child = child;
256 n->parent = parent;
258 slot = htab_find_slot (finally_tree, n, INSERT);
259 gcc_assert (!*slot);
260 *slot = n;
263 static void
264 collect_finally_tree (gimple stmt, gimple region);
266 /* Go through the gimple sequence. Works with collect_finally_tree to
267 record all GIMPLE_LABEL and GIMPLE_TRY statements. */
269 static void
270 collect_finally_tree_1 (gimple_seq seq, gimple region)
272 gimple_stmt_iterator gsi;
274 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
275 collect_finally_tree (gsi_stmt (gsi), region);
278 static void
279 collect_finally_tree (gimple stmt, gimple region)
281 treemple temp;
283 switch (gimple_code (stmt))
285 case GIMPLE_LABEL:
286 temp.t = gimple_label_label (stmt);
287 record_in_finally_tree (temp, region);
288 break;
290 case GIMPLE_TRY:
291 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
293 temp.g = stmt;
294 record_in_finally_tree (temp, region);
295 collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
296 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
298 else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
300 collect_finally_tree_1 (gimple_try_eval (stmt), region);
301 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
303 break;
305 case GIMPLE_CATCH:
306 collect_finally_tree_1 (gimple_catch_handler (stmt), region);
307 break;
309 case GIMPLE_EH_FILTER:
310 collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
311 break;
313 default:
314 /* A type, a decl, or some kind of statement that we're not
315 interested in. Don't walk them. */
316 break;
321 /* Use the finally tree to determine if a jump from START to TARGET
322 would leave the try_finally node that START lives in. */
324 static bool
325 outside_finally_tree (treemple start, gimple target)
327 struct finally_tree_node n, *p;
329 do
330 {
331 n.child = start;
332 p = (struct finally_tree_node *) htab_find (finally_tree, &n);
333 if (!p)
334 return true;
335 start.g = p->parent;
336 }
337 while (start.g != target);
339 return false;
342 /* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY
343 nodes into a set of gotos, magic labels, and eh regions.
344 The eh region creation is straight-forward, but frobbing all the gotos
345 and such into shape isn't. */
347 /* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
348 statements that are seen to escape this GIMPLE_TRY_FINALLY node.
349 The idea is to record a gimple statement for everything except for
350 the conditionals, which get their labels recorded. Since labels are
351 of type 'tree', we need this node to store both gimple and tree
352 objects. REPL_STMT is the sequence used to replace the goto/return
353 statement. CONT_STMT is used to store the statement that allows
354 the return/goto to jump to the original destination. */
356 struct goto_queue_node
358 treemple stmt;
359 gimple_seq repl_stmt;
360 gimple cont_stmt;
361 int index;
362 /* This is used when index >= 0 to indicate that stmt is a label (as
363 opposed to a goto stmt). */
364 int is_label;
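/* For illustration (schematic): given

       try { ... goto out; ... } finally { fin; }
     out:

   the "goto out" escapes the try block, so a goto_queue_node is recorded
   for it, with INDEX identifying "out" in the dest_array of the enclosing
   leh_tf_state.  Later, one of the do_*_redirection routines fills
   REPL_STMT with a "goto <finally entry label>" (preceded by any MOD
   sequence) that replaces the original goto, and CONT_STMT with the
   original "goto out", the jump that resumes at the real destination
   once a copy of the finally block has run.  */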
367 /* State of the world while lowering. */
369 struct leh_state
371 /* What's "current" while constructing the eh region tree. These
372 correspond to variables of the same name in cfun->eh, which we
373 don't have easy access to. */
374 struct eh_region_d *cur_region;
376 /* Processing of TRY_FINALLY requires a bit more state. This is
377 split out into a separate structure so that we don't have to
378 copy so much when processing other nodes. */
379 struct leh_tf_state *tf;
382 struct leh_tf_state
384 /* Pointer to the GIMPLE_TRY_FINALLY node under discussion. The
385 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain
386 this so that outside_finally_tree can reliably reference the tree used
387 in the collect_finally_tree data structures. */
388 gimple try_finally_expr;
389 gimple top_p;
390 /* While lowering, a top_p is usually expanded into multiple statements,
391 thus we need the following field to store them. */
392 gimple_seq top_p_seq;
394 /* The state outside this try_finally node. */
395 struct leh_state *outer;
397 /* The exception region created for it. */
398 struct eh_region_d *region;
400 /* The goto queue. */
401 struct goto_queue_node *goto_queue;
402 size_t goto_queue_size;
403 size_t goto_queue_active;
405 /* Pointer map to help in searching goto_queue when it is large. */
406 struct pointer_map_t *goto_queue_map;
408 /* The set of unique labels seen as entries in the goto queue. */
409 VEC(tree,heap) *dest_array;
411 /* A label to be added at the end of the completed transformed
412 sequence. It will be set if may_fallthru was true *at one time*,
413 though subsequent transformations may have cleared that flag. */
414 tree fallthru_label;
416 /* A label that has been registered with except.c to be the
417 landing pad for this try block. */
418 tree eh_label;
420 /* True if it is possible to fall out the bottom of the try block.
421 Cleared if the fallthru is converted to a goto. */
422 bool may_fallthru;
424 /* True if any entry in goto_queue is a GIMPLE_RETURN. */
425 bool may_return;
427 /* True if the finally block can receive an exception edge.
428 Cleared if the exception case is handled by code duplication. */
429 bool may_throw;
432 static gimple_seq lower_eh_filter (struct leh_state *, gimple);
434 /* Search for STMT in the goto queue. Return the replacement,
435 or null if the statement isn't in the queue. */
437 #define LARGE_GOTO_QUEUE 20
439 static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);
441 static gimple_seq
442 find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
444 unsigned int i;
445 void **slot;
447 if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
449 for (i = 0; i < tf->goto_queue_active; i++)
450 if ( tf->goto_queue[i].stmt.g == stmt.g)
451 return tf->goto_queue[i].repl_stmt;
452 return NULL;
455 /* If we have a large number of entries in the goto_queue, create a
456 pointer map and use that for searching. */
458 if (!tf->goto_queue_map)
460 tf->goto_queue_map = pointer_map_create ();
461 for (i = 0; i < tf->goto_queue_active; i++)
463 slot = pointer_map_insert (tf->goto_queue_map,
464 tf->goto_queue[i].stmt.g);
465 gcc_assert (*slot == NULL);
466 *slot = &tf->goto_queue[i];
470 slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
471 if (slot != NULL)
472 return (((struct goto_queue_node *) *slot)->repl_stmt);
474 return NULL;
477 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
478 lowered GIMPLE_COND. If, by chance, the replacement is a simple goto,
479 then we can just splat it in, otherwise we add the new stmts immediately
480 after the GIMPLE_COND and redirect. */
482 static void
483 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
484 gimple_stmt_iterator *gsi)
486 tree label;
487 gimple_seq new_seq;
488 treemple temp;
490 temp.tp = tp;
491 new_seq = find_goto_replacement (tf, temp);
492 if (!new_seq)
493 return;
495 if (gimple_seq_singleton_p (new_seq)
496 && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
498 *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
499 return;
502 label = create_artificial_label ();
503 /* Set the new label for the GIMPLE_COND */
504 *tp = label;
506 gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
507 gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
510 /* The real work of replace_goto_queue. Returns with GSI updated to
511 point to the next statement. */
513 static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);
515 static void
516 replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
517 gimple_stmt_iterator *gsi)
519 gimple_seq seq;
520 treemple temp;
521 temp.g = NULL;
523 switch (gimple_code (stmt))
525 case GIMPLE_GOTO:
526 case GIMPLE_RETURN:
527 temp.g = stmt;
528 seq = find_goto_replacement (tf, temp);
529 if (seq)
531 gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
532 gsi_remove (gsi, false);
533 return;
535 break;
537 case GIMPLE_COND:
538 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
539 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
540 break;
542 case GIMPLE_TRY:
543 replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
544 replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);
545 break;
546 case GIMPLE_CATCH:
547 replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);
548 break;
549 case GIMPLE_EH_FILTER:
550 replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);
551 break;
553 default:
554 /* These won't have gotos in them. */
555 break;
558 gsi_next (gsi);
561 /* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */
563 static void
564 replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)
566 gimple_stmt_iterator gsi = gsi_start (seq);
568 while (!gsi_end_p (gsi))
569 replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
572 /* Replace all goto queue members. */
574 static void
575 replace_goto_queue (struct leh_tf_state *tf)
577 if (tf->goto_queue_active == 0)
578 return;
579 replace_goto_queue_stmt_list (tf->top_p_seq, tf);
582 /* Add a new record to the goto queue contained in TF. NEW_STMT is the
583 data to be added, IS_LABEL indicates whether NEW_STMT is a label or
584 a gimple return. */
586 static void
587 record_in_goto_queue (struct leh_tf_state *tf,
588 treemple new_stmt,
589 int index,
590 bool is_label)
592 size_t active, size;
593 struct goto_queue_node *q;
595 gcc_assert (!tf->goto_queue_map);
597 active = tf->goto_queue_active;
598 size = tf->goto_queue_size;
599 if (active >= size)
601 size = (size ? size * 2 : 32);
602 tf->goto_queue_size = size;
603 tf->goto_queue
604 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
607 q = &tf->goto_queue[active];
608 tf->goto_queue_active = active + 1;
610 memset (q, 0, sizeof (*q));
611 q->stmt = new_stmt;
612 q->index = index;
613 q->is_label = is_label;
616 /* Record the LABEL label in the goto queue contained in TF.
617 TF is not null. */
619 static void
620 record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
622 int index;
623 treemple temp, new_stmt;
625 if (!label)
626 return;
628 /* Computed and non-local gotos do not get processed. Given
629 their nature we can neither tell whether we've escaped the
630 finally block nor redirect them if we knew. */
631 if (TREE_CODE (label) != LABEL_DECL)
632 return;
634 /* No need to record gotos that don't leave the try block. */
635 temp.t = label;
636 if (!outside_finally_tree (temp, tf->try_finally_expr))
637 return;
639 if (! tf->dest_array)
641 tf->dest_array = VEC_alloc (tree, heap, 10);
642 VEC_quick_push (tree, tf->dest_array, label);
643 index = 0;
645 else
647 int n = VEC_length (tree, tf->dest_array);
648 for (index = 0; index < n; ++index)
649 if (VEC_index (tree, tf->dest_array, index) == label)
650 break;
651 if (index == n)
652 VEC_safe_push (tree, heap, tf->dest_array, label);
655 /* In the case of a GOTO we want to record the destination label,
656 since with a GIMPLE_COND we have easy access to the then/else
657 labels. */
658 new_stmt = stmt;
659 record_in_goto_queue (tf, new_stmt, index, true);
663 /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
664 node, and if so record that fact in the goto queue associated with that
665 try_finally node. */
667 static void
668 maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
670 struct leh_tf_state *tf = state->tf;
671 treemple new_stmt;
673 if (!tf)
674 return;
676 switch (gimple_code (stmt))
678 case GIMPLE_COND:
679 new_stmt.tp = gimple_op_ptr (stmt, 2);
680 record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
681 new_stmt.tp = gimple_op_ptr (stmt, 3);
682 record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));
683 break;
684 case GIMPLE_GOTO:
685 new_stmt.g = stmt;
686 record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));
687 break;
689 case GIMPLE_RETURN:
690 tf->may_return = true;
691 new_stmt.g = stmt;
692 record_in_goto_queue (tf, new_stmt, -1, false);
693 break;
695 default:
696 gcc_unreachable ();
701 #ifdef ENABLE_CHECKING
702 /* We do not process GIMPLE_SWITCHes for now. As long as the original source
703 was in fact structured, and we've not yet done jump threading, then none
704 of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */
706 static void
707 verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
709 struct leh_tf_state *tf = state->tf;
710 size_t i, n;
712 if (!tf)
713 return;
715 n = gimple_switch_num_labels (switch_expr);
717 for (i = 0; i < n; ++i)
719 treemple temp;
720 tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
721 temp.t = lab;
722 gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
725 #else
726 #define verify_norecord_switch_expr(state, switch_expr)
727 #endif
729 /* Redirect the return statement recorded in Q to FINLAB. Place in Q->CONT_STMT
730 whatever is needed to finish the return. If MOD is non-null, insert it
731 before the new branch. RETURN_VALUE_P is a cache containing a temporary
732 variable to be used in manipulating the value returned from the function. */
734 static void
735 do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
736 tree *return_value_p)
738 tree ret_expr;
739 gimple x;
741 /* In the case of a return, the queue node must be a gimple statement. */
742 gcc_assert (!q->is_label);
744 ret_expr = gimple_return_retval (q->stmt.g);
746 if (ret_expr)
748 if (!*return_value_p)
749 *return_value_p = ret_expr;
750 else
751 gcc_assert (*return_value_p == ret_expr);
752 q->cont_stmt = q->stmt.g;
753 /* The nasty part about redirecting the return value is that the
754 return value itself is to be computed before the FINALLY block
755 is executed. e.g.
757 int x;
758 int foo (void)
760 x = 0;
761 try {
762 return x;
763 } finally {
764 x++;
768 should return 0, not 1. Arrange for this to happen by copying
769 the computed return value into a local temporary. This also
770 allows us to redirect multiple return statements through the
771 same destination block; whether this is a net win or not really
772 depends, I guess, but it does make generation of the switch in
773 lower_try_finally_switch easier. */
775 if (TREE_CODE (ret_expr) == RESULT_DECL)
777 if (!*return_value_p)
778 *return_value_p = ret_expr;
779 else
780 gcc_assert (*return_value_p == ret_expr);
781 q->cont_stmt = q->stmt.g;
783 else
784 gcc_unreachable ();
786 else
787 /* If we don't return a value, all return statements are the same. */
788 q->cont_stmt = q->stmt.g;
790 if (!q->repl_stmt)
791 q->repl_stmt = gimple_seq_alloc ();
793 if (mod)
794 gimple_seq_add_seq (&q->repl_stmt, mod);
796 x = gimple_build_goto (finlab);
797 gimple_seq_add_stmt (&q->repl_stmt, x);
800 /* Similar, but easier, for GIMPLE_GOTO. */
802 static void
803 do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
804 struct leh_tf_state *tf)
806 gimple x;
808 gcc_assert (q->is_label);
809 if (!q->repl_stmt)
810 q->repl_stmt = gimple_seq_alloc ();
812 q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array,q->index));
814 if (mod)
815 gimple_seq_add_seq (&q->repl_stmt, mod);
817 x = gimple_build_goto (finlab);
818 gimple_seq_add_stmt (&q->repl_stmt, x);
821 /* We want to transform
822 try { body; } catch { stuff; }
823 to
824 body; goto over; lab: stuff; over:
826 TP is a GIMPLE_TRY node. LAB is the label that
827 should be placed before the second operand, or NULL. OVER is
828 an existing label that should be put at the exit, or NULL. */
830 static gimple_seq
831 frob_into_branch_around (gimple tp, tree lab, tree over)
833 gimple x;
834 gimple_seq cleanup, result;
836 cleanup = gimple_try_cleanup (tp);
837 result = gimple_try_eval (tp);
839 if (gimple_seq_may_fallthru (result))
841 if (!over)
842 over = create_artificial_label ();
843 x = gimple_build_goto (over);
844 gimple_seq_add_stmt (&result, x);
847 if (lab)
849 x = gimple_build_label (lab);
850 gimple_seq_add_stmt (&result, x);
853 gimple_seq_add_seq (&result, cleanup);
855 if (over)
857 x = gimple_build_label (over);
858 gimple_seq_add_stmt (&result, x);
860 return result;
863 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
864 Make sure to record all new labels found. */
866 static gimple_seq
867 lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
869 gimple region = NULL;
870 gimple_seq new_seq;
872 new_seq = copy_gimple_seq_and_replace_locals (seq);
874 if (outer_state->tf)
875 region = outer_state->tf->try_finally_expr;
876 collect_finally_tree_1 (new_seq, region);
878 return new_seq;
881 /* A subroutine of lower_try_finally. Create a fallthru label for
882 the given try_finally state. The only tricky bit here is that
883 we have to make sure to record the label in our outer context. */
885 static tree
886 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
888 tree label = tf->fallthru_label;
889 treemple temp;
891 if (!label)
893 label = create_artificial_label ();
894 tf->fallthru_label = label;
895 if (tf->outer->tf)
897 temp.t = label;
898 record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
901 return label;
904 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
905 returns non-null, then the language requires that the exception path out
906 of a try_finally be treated specially. To wit: the code within the
907 finally block may not itself throw an exception. We have two choices here.
908 First we can duplicate the finally block and wrap it in a must_not_throw
909 region. Second, we can generate code like
911 try {
912 finally_block;
913 } catch {
914 if (fintmp == eh_edge)
915 protect_cleanup_actions;
918 where "fintmp" is the temporary used in the switch statement generation
919 alternative considered below. For the nonce, we always choose the first
920 option.
922 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
924 static void
925 honor_protect_cleanup_actions (struct leh_state *outer_state,
926 struct leh_state *this_state,
927 struct leh_tf_state *tf)
929 gimple protect_cleanup_actions;
930 gimple_stmt_iterator gsi;
931 bool finally_may_fallthru;
932 gimple_seq finally;
933 gimple x;
935 /* First check for nothing to do. */
936 if (lang_protect_cleanup_actions)
937 protect_cleanup_actions = lang_protect_cleanup_actions ();
938 else
939 protect_cleanup_actions = NULL;
941 finally = gimple_try_cleanup (tf->top_p);
943 /* If the EH case of the finally block can fall through, this may be a
944 structure of the form
945 try {
946 try {
947 throw ...;
948 } cleanup {
949 try {
950 throw ...;
951 } catch (...) {
954 } catch (...) {
955 yyy;
957 E.g. with an inline destructor with an embedded try block. In this
958 case we must save the runtime EH data around the nested exception.
960 This complication means that any time the previous runtime data might
961 be used (via fallthru from the finally) we handle the eh case here,
962 whether or not protect_cleanup_actions is active. */
964 finally_may_fallthru = gimple_seq_may_fallthru (finally);
965 if (!finally_may_fallthru && !protect_cleanup_actions)
966 return;
968 /* Duplicate the FINALLY block. Only need to do this for try-finally,
969 and not for cleanups. */
970 if (this_state)
971 finally = lower_try_finally_dup_block (finally, outer_state);
973 /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
974 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
975 to be in an enclosing scope, but needs to be implemented at this level
976 to avoid a nesting violation (see wrap_temporary_cleanups in
977 cp/decl.c). Since it's logically at an outer level, we should call
978 terminate before we get to it, so strip it away before adding the
979 MUST_NOT_THROW filter. */
980 gsi = gsi_start (finally);
981 x = gsi_stmt (gsi);
982 if (protect_cleanup_actions
983 && gimple_code (x) == GIMPLE_TRY
984 && gimple_try_kind (x) == GIMPLE_TRY_CATCH
985 && gimple_try_catch_is_cleanup (x))
987 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
988 gsi_remove (&gsi, false);
991 /* Resume execution after the exception. Adding this now lets
992 lower_eh_filter not add unnecessary gotos, as it is clear that
993 we never fallthru from this copy of the finally block. */
994 if (finally_may_fallthru)
996 tree save_eptr, save_filt;
997 tree tmp;
999 save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
1000 save_filt = create_tmp_var (integer_type_node, "save_filt");
1002 gsi = gsi_start (finally);
1003 tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
1004 x = gimple_build_assign (save_eptr, tmp);
1005 gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);
1007 tmp = build0 (FILTER_EXPR, integer_type_node);
1008 x = gimple_build_assign (save_filt, tmp);
1009 gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);
1011 gsi = gsi_last (finally);
1012 tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
1013 x = gimple_build_assign (tmp, save_eptr);
1014 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1016 tmp = build0 (FILTER_EXPR, integer_type_node);
1017 x = gimple_build_assign (tmp, save_filt);
1018 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1020 x = gimple_build_resx (get_eh_region_number (tf->region));
1021 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1024 /* Wrap the block with protect_cleanup_actions as the action. */
1025 if (protect_cleanup_actions)
1027 gimple_seq seq = NULL, failure = NULL;
1029 gimple_seq_add_stmt (&failure, protect_cleanup_actions);
1030 x = gimple_build_eh_filter (NULL, failure);
1031 gimple_eh_filter_set_must_not_throw (x, 1);
1033 gimple_seq_add_stmt (&seq, x);
1034 x = gimple_build_try (finally, seq, GIMPLE_TRY_CATCH);
1035 finally = lower_eh_filter (outer_state, x);
1037 else
1038 lower_eh_constructs_1 (outer_state, finally);
1040 /* Hook this up to the end of the existing try block. If we
1041 previously fell through the end, we'll have to branch around.
1042 This means adding a new goto, and adding it to the queue. */
1044 gsi = gsi_last (gimple_try_eval (tf->top_p));
1046 if (tf->may_fallthru)
1048 tree tmp;
1049 tmp = lower_try_finally_fallthru_label (tf);
1050 x = gimple_build_goto (tmp);
1051 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1053 if (this_state)
1054 maybe_record_in_goto_queue (this_state, x);
1056 tf->may_fallthru = false;
1059 x = gimple_build_label (tf->eh_label);
1060 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1061 gsi_insert_seq_after (&gsi, finally, GSI_CONTINUE_LINKING);
1063 /* Having now been handled, EH isn't to be considered with
1064 the rest of the outgoing edges. */
1065 tf->may_throw = false;
1068 /* A subroutine of lower_try_finally. We have determined that there is
1069 no fallthru edge out of the finally block. This means that there is
1070 no outgoing edge corresponding to any incoming edge. Restructure the
1071 try_finally node for this special case. */
1073 static void
1074 lower_try_finally_nofallthru (struct leh_state *state,
1075 struct leh_tf_state *tf)
1077 tree lab, return_val;
1078 gimple x;
1079 gimple_seq finally;
1080 struct goto_queue_node *q, *qe;
1082 if (tf->may_throw)
1083 lab = tf->eh_label;
1084 else
1085 lab = create_artificial_label ();
1087 /* We expect that tf->top_p is a GIMPLE_TRY. */
1088 finally = gimple_try_cleanup (tf->top_p);
1089 tf->top_p_seq = gimple_try_eval (tf->top_p);
1091 x = gimple_build_label (lab);
1092 gimple_seq_add_stmt (&tf->top_p_seq, x);
1094 return_val = NULL;
1095 q = tf->goto_queue;
1096 qe = q + tf->goto_queue_active;
1097 for (; q < qe; ++q)
1098 if (q->index < 0)
1099 do_return_redirection (q, lab, NULL, &return_val);
1100 else
1101 do_goto_redirection (q, lab, NULL, tf);
1103 replace_goto_queue (tf);
1105 lower_eh_constructs_1 (state, finally);
1106 gimple_seq_add_seq (&tf->top_p_seq, finally);
1109 /* A subroutine of lower_try_finally. We have determined that there is
1110 exactly one destination of the finally block. Restructure the
1111 try_finally node for this special case. */
1113 static void
1114 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
1116 struct goto_queue_node *q, *qe;
1117 gimple x;
1118 gimple_seq finally;
1119 tree finally_label;
1121 finally = gimple_try_cleanup (tf->top_p);
1122 tf->top_p_seq = gimple_try_eval (tf->top_p);
1124 lower_eh_constructs_1 (state, finally);
1126 if (tf->may_throw)
1128 /* Only reachable via the exception edge. Add the given label to
1129 the head of the FINALLY block. Append a RESX at the end. */
1131 x = gimple_build_label (tf->eh_label);
1132 gimple_seq_add_stmt (&tf->top_p_seq, x);
1134 gimple_seq_add_seq (&tf->top_p_seq, finally);
1136 x = gimple_build_resx (get_eh_region_number (tf->region));
1138 gimple_seq_add_stmt (&tf->top_p_seq, x);
1140 return;
1143 if (tf->may_fallthru)
1145 /* Only reachable via the fallthru edge. Do nothing but let
1146 the two blocks run together; we'll fall out the bottom. */
1147 gimple_seq_add_seq (&tf->top_p_seq, finally);
1148 return;
1151 finally_label = create_artificial_label ();
1152 x = gimple_build_label (finally_label);
1153 gimple_seq_add_stmt (&tf->top_p_seq, x);
1155 gimple_seq_add_seq (&tf->top_p_seq, finally);
1157 q = tf->goto_queue;
1158 qe = q + tf->goto_queue_active;
1160 if (tf->may_return)
1162 /* Reachable by return expressions only. Redirect them. */
1163 tree return_val = NULL;
1164 for (; q < qe; ++q)
1165 do_return_redirection (q, finally_label, NULL, &return_val);
1166 replace_goto_queue (tf);
1168 else
1170 /* Reachable by goto expressions only. Redirect them. */
1171 for (; q < qe; ++q)
1172 do_goto_redirection (q, finally_label, NULL, tf);
1173 replace_goto_queue (tf);
1175 if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
1177 /* Reachable by goto to fallthru label only. Redirect it
1178 to the new label (already created, sadly), and do not
1179 emit the final branch out, or the fallthru label. */
1180 tf->fallthru_label = NULL;
1181 return;
1185 /* Place the original return/goto to the original destination
1186 immediately after the finally block. */
1187 x = tf->goto_queue[0].cont_stmt;
1188 gimple_seq_add_stmt (&tf->top_p_seq, x);
1189 maybe_record_in_goto_queue (state, x);
1192 /* A subroutine of lower_try_finally. There are multiple edges incoming
1193 and outgoing from the finally block. Implement this by duplicating the
1194 finally block for every destination. */
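/* For illustration (schematic): for

       try { ... goto A; ... goto B; ... } finally { fin; }

   where both gotos and the fall-through edge leave the block, this
   strategy produces roughly

       ... goto LA; ... goto LB; ...
       fin; goto <fallthru label>;         -- copy for the fallthru edge
     LA: fin; goto A;                      -- one copy per goto destination
     LB: fin; goto B;

   plus, when the region may throw, one more copy reached from the EH
   label and ending in a RESX.  */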
1196 static void
1197 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1199 gimple_seq finally;
1200 gimple_seq new_stmt;
1201 gimple_seq seq;
1202 gimple x;
1203 tree tmp;
1205 finally = gimple_try_cleanup (tf->top_p);
1206 tf->top_p_seq = gimple_try_eval (tf->top_p);
1207 new_stmt = NULL;
1209 if (tf->may_fallthru)
1211 seq = lower_try_finally_dup_block (finally, state);
1212 lower_eh_constructs_1 (state, seq);
1213 gimple_seq_add_seq (&new_stmt, seq);
1215 tmp = lower_try_finally_fallthru_label (tf);
1216 x = gimple_build_goto (tmp);
1217 gimple_seq_add_stmt (&new_stmt, x);
1220 if (tf->may_throw)
1222 x = gimple_build_label (tf->eh_label);
1223 gimple_seq_add_stmt (&new_stmt, x);
1225 seq = lower_try_finally_dup_block (finally, state);
1226 lower_eh_constructs_1 (state, seq);
1227 gimple_seq_add_seq (&new_stmt, seq);
1229 x = gimple_build_resx (get_eh_region_number (tf->region));
1230 gimple_seq_add_stmt (&new_stmt, x);
1233 if (tf->goto_queue)
1235 struct goto_queue_node *q, *qe;
1236 tree return_val = NULL;
1237 int return_index, index;
1238 struct labels_s
1240 struct goto_queue_node *q;
1241 tree label;
1242 } *labels;
1244 return_index = VEC_length (tree, tf->dest_array);
1245 labels = XCNEWVEC (struct labels_s, return_index + 1);
1247 q = tf->goto_queue;
1248 qe = q + tf->goto_queue_active;
1249 for (; q < qe; q++)
1251 index = q->index < 0 ? return_index : q->index;
1253 if (!labels[index].q)
1254 labels[index].q = q;
1257 for (index = 0; index < return_index + 1; index++)
1259 tree lab;
1261 q = labels[index].q;
1262 if (! q)
1263 continue;
1265 lab = labels[index].label = create_artificial_label ();
1267 if (index == return_index)
1268 do_return_redirection (q, lab, NULL, &return_val);
1269 else
1270 do_goto_redirection (q, lab, NULL, tf);
1272 x = gimple_build_label (lab);
1273 gimple_seq_add_stmt (&new_stmt, x);
1275 seq = lower_try_finally_dup_block (finally, state);
1276 lower_eh_constructs_1 (state, seq);
1277 gimple_seq_add_seq (&new_stmt, seq);
1279 gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
1280 maybe_record_in_goto_queue (state, q->cont_stmt);
1283 for (q = tf->goto_queue; q < qe; q++)
1285 tree lab;
1287 index = q->index < 0 ? return_index : q->index;
1289 if (labels[index].q == q)
1290 continue;
1292 lab = labels[index].label;
1294 if (index == return_index)
1295 do_return_redirection (q, lab, NULL, &return_val);
1296 else
1297 do_goto_redirection (q, lab, NULL, tf);
1300 replace_goto_queue (tf);
1301 free (labels);
1304 /* Need to link new stmts after running replace_goto_queue due
1305 to not wanting to process the same goto stmts twice. */
1306 gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
1309 /* A subroutine of lower_try_finally. There are multiple edges incoming
1310 and outgoing from the finally block. Implement this by instrumenting
1311 each incoming edge and creating a switch statement at the end of the
1312 finally block that branches to the appropriate destination. */
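/* For illustration (schematic): with a single escaping "goto A;" and a
   possible fall-through, the code below produces roughly

       ... finally_tmp = 0; goto <finally label>; ...   -- redirected goto A
       finally_tmp = 1;                                 -- fallthru path
     <finally label>:
       fin;
       switch (finally_tmp)
         {
         case 1: goto <fallthru label>;
         default: goto A;                 -- case 0, reused as the default
         }

   The actual index values depend on how many goto, return, EH and
   fallthru destinations exist; see the *_index computations below.  */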
1314 static void
1315 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1317 struct goto_queue_node *q, *qe;
1318 tree return_val = NULL;
1319 tree finally_tmp, finally_label;
1320 int return_index, eh_index, fallthru_index;
1321 int nlabels, ndests, j, last_case_index;
1322 tree last_case;
1323 VEC (tree,heap) *case_label_vec;
1324 gimple_seq switch_body;
1325 gimple x;
1326 tree tmp;
1327 gimple switch_stmt;
1328 gimple_seq finally;
1329 struct pointer_map_t *cont_map = NULL;
1331 switch_body = gimple_seq_alloc ();
1333 /* Mash the TRY block to the head of the chain. */
1334 finally = gimple_try_cleanup (tf->top_p);
1335 tf->top_p_seq = gimple_try_eval (tf->top_p);
1337 /* Lower the finally block itself. */
1338 lower_eh_constructs_1 (state, finally);
1340 /* Prepare for switch statement generation. */
1341 nlabels = VEC_length (tree, tf->dest_array);
1342 return_index = nlabels;
1343 eh_index = return_index + tf->may_return;
1344 fallthru_index = eh_index + tf->may_throw;
1345 ndests = fallthru_index + tf->may_fallthru;
1347 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1348 finally_label = create_artificial_label ();
1350 /* We use VEC_quick_push on case_label_vec throughout this function,
1351 since we know the size in advance and allocate precisely as much
1352 space as needed. */
1353 case_label_vec = VEC_alloc (tree, heap, ndests);
1354 last_case = NULL;
1355 last_case_index = 0;
1357 /* Begin inserting code for getting to the finally block. Things
1358 are done in this order to correspond to the sequence the code is
1359 laid out. */
1361 if (tf->may_fallthru)
1363 x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node,
1364 fallthru_index));
1365 gimple_seq_add_stmt (&tf->top_p_seq, x);
1367 if (tf->may_throw)
1369 x = gimple_build_goto (finally_label);
1370 gimple_seq_add_stmt (&tf->top_p_seq, x);
1374 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1375 build_int_cst (NULL_TREE, fallthru_index), NULL,
1376 create_artificial_label ());
1377 VEC_quick_push (tree, case_label_vec, last_case);
1378 last_case_index++;
1380 x = gimple_build_label (CASE_LABEL (last_case));
1381 gimple_seq_add_stmt (&switch_body, x);
1383 tmp = lower_try_finally_fallthru_label (tf);
1384 x = gimple_build_goto (tmp);
1385 gimple_seq_add_stmt (&switch_body, x);
1388 if (tf->may_throw)
1390 x = gimple_build_label (tf->eh_label);
1391 gimple_seq_add_stmt (&tf->top_p_seq, x);
1393 x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node,
1394 eh_index));
1395 gimple_seq_add_stmt (&tf->top_p_seq, x);
1397 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1398 build_int_cst (NULL_TREE, eh_index), NULL,
1399 create_artificial_label ());
1400 VEC_quick_push (tree, case_label_vec, last_case);
1401 last_case_index++;
1403 x = gimple_build_label (CASE_LABEL (last_case));
1404 gimple_seq_add_stmt (&switch_body, x);
1405 x = gimple_build_resx (get_eh_region_number (tf->region));
1406 gimple_seq_add_stmt (&switch_body, x);
1409 x = gimple_build_label (finally_label);
1410 gimple_seq_add_stmt (&tf->top_p_seq, x);
1412 gimple_seq_add_seq (&tf->top_p_seq, finally);
1414 /* Redirect each incoming goto edge. */
1415 q = tf->goto_queue;
1416 qe = q + tf->goto_queue_active;
1417 j = last_case_index + tf->may_return;
1418 /* Prepare the assignments to finally_tmp that are executed upon the
1419 entrance through a particular edge. */
1420 for (; q < qe; ++q)
1422 gimple_seq mod;
1423 int switch_id;
1424 unsigned int case_index;
1426 mod = gimple_seq_alloc ();
1428 if (q->index < 0)
1430 x = gimple_build_assign (finally_tmp,
1431 build_int_cst (integer_type_node,
1432 return_index));
1433 gimple_seq_add_stmt (&mod, x);
1434 do_return_redirection (q, finally_label, mod, &return_val);
1435 switch_id = return_index;
1437 else
1439 x = gimple_build_assign (finally_tmp,
1440 build_int_cst (integer_type_node, q->index));
1441 gimple_seq_add_stmt (&mod, x);
1442 do_goto_redirection (q, finally_label, mod, tf);
1443 switch_id = q->index;
1446 case_index = j + q->index;
1447 if (VEC_length (tree, case_label_vec) <= case_index
1448 || !VEC_index (tree, case_label_vec, case_index))
1450 tree case_lab;
1451 void **slot;
1452 case_lab = build3 (CASE_LABEL_EXPR, void_type_node,
1453 build_int_cst (NULL_TREE, switch_id), NULL,
1454 NULL);
1455 /* We store the cont_stmt in the pointer map, so that we can recover
1456 it in the loop below. We don't create the new label while
1457 walking the goto_queue because pointers don't offer a stable
1458 order. */
1459 if (!cont_map)
1460 cont_map = pointer_map_create ();
1461 slot = pointer_map_insert (cont_map, case_lab);
1462 *slot = q->cont_stmt;
1463 VEC_quick_push (tree, case_label_vec, case_lab);
1466 for (j = last_case_index; j < last_case_index + nlabels; j++)
1468 tree label;
1469 gimple cont_stmt;
1470 void **slot;
1472 last_case = VEC_index (tree, case_label_vec, j);
1474 gcc_assert (last_case);
1475 gcc_assert (cont_map);
1477 slot = pointer_map_contains (cont_map, last_case);
1478 /* As the comment above suggests, CASE_LABEL (last_case) was just a
1479 placeholder; it does not store an actual label yet. */
1480 gcc_assert (slot);
1481 cont_stmt = *(gimple *) slot;
1483 label = create_artificial_label ();
1484 CASE_LABEL (last_case) = label;
1486 x = gimple_build_label (label);
1487 gimple_seq_add_stmt (&switch_body, x);
1488 gimple_seq_add_stmt (&switch_body, cont_stmt);
1489 maybe_record_in_goto_queue (state, cont_stmt);
1491 if (cont_map)
1492 pointer_map_destroy (cont_map);
1494 replace_goto_queue (tf);
1496 /* Make sure that the last case is the default label, as one is required.
1497 Then sort the labels, which is also required in GIMPLE. */
1498 CASE_LOW (last_case) = NULL;
1499 sort_case_labels (case_label_vec);
1501 /* Build the switch statement, setting last_case to be the default
1502 label. */
1503 switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,
1504 case_label_vec);
1506 /* Need to link SWITCH_STMT after running replace_goto_queue
1507 due to not wanting to process the same goto stmts twice. */
1508 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
1509 gimple_seq_add_seq (&tf->top_p_seq, switch_body);
1512 /* Decide whether or not we are going to duplicate the finally block.
1513 There are several considerations.
1515 First, if this is Java, then the finally block contains code
1516 written by the user. It has line numbers associated with it,
1517 so duplicating the block means it's difficult to set a breakpoint.
1518 Since controlling code generation via -g is verboten, we simply
1519 never duplicate code without optimization.
1521 Second, we'd like to prevent egregious code growth. One way to
1522 do this is to estimate the size of the finally block, multiply
1523 that by the number of copies we'd need to make, and compare against
1524 the estimate of the size of the switch machinery we'd have to add. */
1526 static bool
1527 decide_copy_try_finally (int ndests, gimple_seq finally)
1529 int f_estimate, sw_estimate;
1531 if (!optimize)
1532 return false;
1534 /* Finally estimate N times, plus N gotos. */
1535 f_estimate = count_insns_seq (finally, &eni_size_weights);
1536 f_estimate = (f_estimate + 1) * ndests;
1538 /* Switch statement (cost 10), N variable assignments, N gotos. */
1539 sw_estimate = 10 + 2 * ndests;
1541 /* Optimize for size clearly wants our best guess. */
1542 if (optimize_function_for_size_p (cfun))
1543 return f_estimate < sw_estimate;
1545 /* ??? These numbers are completely made up so far. */
1546 if (optimize > 1)
1547 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1548 else
1549 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
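/* For illustration (rough arithmetic on the estimates above): a finally
   block of about 15 instructions with 3 destinations gives

       f_estimate  = (15 + 1) * 3 = 48
       sw_estimate = 10 + 2 * 3   = 16

   so when optimizing for size we keep the switch (48 < 16 fails), at -O2
   and above we still duplicate (48 < 100 holds), and at plain -O1 we fall
   back to the switch as well (48 < 40 and 96 < 48 both fail).  */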
1553 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY node
1554 to a sequence of labels and blocks, plus the exception region trees
1555 that record all the magic. This is complicated by the need to
1556 arrange for the FINALLY block to be executed on all exits. */
1558 static gimple_seq
1559 lower_try_finally (struct leh_state *state, gimple tp)
1561 struct leh_tf_state this_tf;
1562 struct leh_state this_state;
1563 int ndests;
1565 /* Process the try block. */
1567 memset (&this_tf, 0, sizeof (this_tf));
1568 this_tf.try_finally_expr = tp;
1569 this_tf.top_p = tp;
1570 this_tf.outer = state;
1571 if (using_eh_for_cleanups_p)
1572 this_tf.region
1573 = gen_eh_region_cleanup (state->cur_region);
1574 else
1575 this_tf.region = NULL;
1577 this_state.cur_region = this_tf.region;
1578 this_state.tf = &this_tf;
1580 lower_eh_constructs_1 (&this_state, gimple_try_eval(tp));
1582 /* Determine if the try block is escaped through the bottom. */
1583 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1585 /* Determine if any exceptions are possible within the try block. */
1586 if (using_eh_for_cleanups_p)
1587 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
1588 if (this_tf.may_throw)
1590 this_tf.eh_label = create_artificial_label ();
1591 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1592 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1595 /* Determine how many edges (still) reach the finally block. Or rather,
1596 how many destinations are reached by the finally block. Use this to
1597 determine how we process the finally block itself. */
1599 ndests = VEC_length (tree, this_tf.dest_array);
1600 ndests += this_tf.may_fallthru;
1601 ndests += this_tf.may_return;
1602 ndests += this_tf.may_throw;
1604 /* If the FINALLY block is not reachable, dike it out. */
1605 if (ndests == 0)
1607 gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
1608 gimple_try_set_cleanup (tp, NULL);
1610 /* If the finally block doesn't fall through, then any destination
1611 we might try to impose there isn't reached either. There may be
1612 some minor amount of cleanup and redirection still needed. */
1613 else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
1614 lower_try_finally_nofallthru (state, &this_tf);
1616 /* We can easily special-case redirection to a single destination. */
1617 else if (ndests == 1)
1618 lower_try_finally_onedest (state, &this_tf);
1619 else if (decide_copy_try_finally (ndests, gimple_try_cleanup (tp)))
1620 lower_try_finally_copy (state, &this_tf);
1621 else
1622 lower_try_finally_switch (state, &this_tf);
1624 /* If someone requested we add a label at the end of the transformed
1625 block, do so. */
1626 if (this_tf.fallthru_label)
1628 /* This must be reached only if ndests == 0. */
1629 gimple x = gimple_build_label (this_tf.fallthru_label);
1630 gimple_seq_add_stmt (&this_tf.top_p_seq, x);
1633 VEC_free (tree, heap, this_tf.dest_array);
1634 if (this_tf.goto_queue)
1635 free (this_tf.goto_queue);
1636 if (this_tf.goto_queue_map)
1637 pointer_map_destroy (this_tf.goto_queue_map);
1639 return this_tf.top_p_seq;
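/* For illustration (schematic): a try-finally whose body can fall through,
   contains one escaping goto and one return, and may throw has

       ndests = 1 (goto) + 1 (fallthru) + 1 (return) + 1 (throw) = 4

   and so is handed to lower_try_finally_copy or lower_try_finally_switch
   depending on decide_copy_try_finally; with only the fallthru edge left
   it would instead take the ndests == 1 path into
   lower_try_finally_onedest.  */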
1642 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a
1643 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
1644 exception region trees that record all the magic. */
1646 static gimple_seq
1647 lower_catch (struct leh_state *state, gimple tp)
1649 struct eh_region_d *try_region;
1650 struct leh_state this_state;
1651 gimple_stmt_iterator gsi;
1652 tree out_label;
1654 try_region = gen_eh_region_try (state->cur_region);
1655 this_state.cur_region = try_region;
1656 this_state.tf = state->tf;
1658 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
1660 if (!get_eh_region_may_contain_throw (try_region))
1662 return gimple_try_eval (tp);
1665 out_label = NULL;
1666 for (gsi = gsi_start (gimple_try_cleanup (tp)); !gsi_end_p (gsi); )
1668 struct eh_region_d *catch_region;
1669 tree eh_label;
1670 gimple x, gcatch;
1672 gcatch = gsi_stmt (gsi);
1673 catch_region = gen_eh_region_catch (try_region,
1674 gimple_catch_types (gcatch));
1676 this_state.cur_region = catch_region;
1677 lower_eh_constructs_1 (&this_state, gimple_catch_handler (gcatch));
1679 eh_label = create_artificial_label ();
1680 set_eh_region_tree_label (catch_region, eh_label);
1682 x = gimple_build_label (eh_label);
1683 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
1685 if (gimple_seq_may_fallthru (gimple_catch_handler (gcatch)))
1687 if (!out_label)
1688 out_label = create_artificial_label ();
1690 x = gimple_build_goto (out_label);
1691 gimple_seq_add_stmt (gimple_catch_handler_ptr (gcatch), x);
1694 gsi_insert_seq_before (&gsi, gimple_catch_handler (gcatch),
1695 GSI_SAME_STMT);
1696 gsi_remove (&gsi, false);
1699 return frob_into_branch_around (tp, NULL, out_label);
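/* For illustration (schematic): a try with two handlers,

       try { body; } catch (T1) { h1; } catch (T2) { h2; }

   comes out of the loop above plus frob_into_branch_around roughly as

       body; goto over;
     L1: h1; goto over;
     L2: h2; goto over;
     over:

   where L1 and L2 are the artificial labels registered as the tree labels
   of the two new catch regions, and each trailing goto is emitted only
   when the corresponding handler can fall through.  */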
1702 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a
1703 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
1704 region trees that record all the magic. */
1706 static gimple_seq
1707 lower_eh_filter (struct leh_state *state, gimple tp)
1709 struct leh_state this_state;
1710 struct eh_region_d *this_region;
1711 gimple inner;
1712 tree eh_label;
1714 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1716 if (gimple_eh_filter_must_not_throw (inner))
1717 this_region = gen_eh_region_must_not_throw (state->cur_region);
1718 else
1719 this_region = gen_eh_region_allowed (state->cur_region,
1720 gimple_eh_filter_types (inner));
1721 this_state = *state;
1722 this_state.cur_region = this_region;
1724 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
1726 if (!get_eh_region_may_contain_throw (this_region))
1728 return gimple_try_eval (tp);
1731 lower_eh_constructs_1 (state, gimple_eh_filter_failure (inner));
1732 gimple_try_set_cleanup (tp, gimple_eh_filter_failure (inner));
1734 eh_label = create_artificial_label ();
1735 set_eh_region_tree_label (this_region, eh_label);
1737 return frob_into_branch_around (tp, eh_label, NULL);
1740 /* Implement a cleanup expression. This is similar to try-finally,
1741 except that we only execute the cleanup block for exception edges. */
1743 static gimple_seq
1744 lower_cleanup (struct leh_state *state, gimple tp)
1746 struct leh_state this_state;
1747 struct eh_region_d *this_region;
1748 struct leh_tf_state fake_tf;
1749 gimple_seq result;
1751 /* If not using eh, then exception-only cleanups are no-ops. */
1752 if (!flag_exceptions)
1754 result = gimple_try_eval (tp);
1755 lower_eh_constructs_1 (state, result);
1756 return result;
1759 this_region = gen_eh_region_cleanup (state->cur_region);
1760 this_state = *state;
1761 this_state.cur_region = this_region;
1763 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
1765 if (!get_eh_region_may_contain_throw (this_region))
1767 return gimple_try_eval (tp);
1770 /* Build enough of a try-finally state so that we can reuse
1771 honor_protect_cleanup_actions. */
1772 memset (&fake_tf, 0, sizeof (fake_tf));
1773 fake_tf.top_p = tp;
1774 fake_tf.outer = state;
1775 fake_tf.region = this_region;
1776 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1777 fake_tf.may_throw = true;
1779 fake_tf.eh_label = create_artificial_label ();
1780 set_eh_region_tree_label (this_region, fake_tf.eh_label);
1782 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1784 if (fake_tf.may_throw)
1786 /* In this case honor_protect_cleanup_actions had nothing to do,
1787 and we should process this normally. */
1788 lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
1789 result = frob_into_branch_around (tp, fake_tf.eh_label,
1790 fake_tf.fallthru_label);
1792 else
1794 /* In this case honor_protect_cleanup_actions did nearly all of
1795 the work. All we have left is to append the fallthru_label. */
1797 result = gimple_try_eval (tp);
1798 if (fake_tf.fallthru_label)
1800 gimple x = gimple_build_label (fake_tf.fallthru_label);
1801 gimple_seq_add_stmt (&result, x);
1804 return result;
1809 /* Main loop for lowering eh constructs. Also moves gsi to the next
1810 statement. */
1812 static void
1813 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1815 gimple_seq replace;
1816 gimple x;
1817 gimple stmt = gsi_stmt (*gsi);
1819 switch (gimple_code (stmt))
1821 case GIMPLE_CALL:
1822 case GIMPLE_ASSIGN:
1823 /* If the stmt can throw use a new temporary for the assignment
1824 to a LHS. This makes sure the old value of the LHS is
1825 available on the EH edge. */
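/* E.g. (schematic): "x = foo ();" where the call may throw becomes
   "tmp = foo (); x = tmp;", so that x still holds its previous value
   if foo throws and control reaches the landing pad.  */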
1826 if (stmt_could_throw_p (stmt)
1827 && gimple_has_lhs (stmt)
1828 && !tree_could_throw_p (gimple_get_lhs (stmt))
1829 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
1831 tree lhs = gimple_get_lhs (stmt);
1832 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
1833 gimple s = gimple_build_assign (lhs, tmp);
1834 gimple_set_location (s, gimple_location (stmt));
1835 gimple_set_block (s, gimple_block (stmt));
1836 gimple_set_lhs (stmt, tmp);
1837 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
1838 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
1839 DECL_GIMPLE_REG_P (tmp) = 1;
1840 gsi_insert_after (gsi, s, GSI_SAME_STMT);
1842 /* Look for things that can throw exceptions, and record them. */
1843 if (state->cur_region && stmt_could_throw_p (stmt))
1845 record_stmt_eh_region (state->cur_region, stmt);
1846 note_eh_region_may_contain_throw (state->cur_region);
1848 break;
1850 case GIMPLE_COND:
1851 case GIMPLE_GOTO:
1852 case GIMPLE_RETURN:
1853 maybe_record_in_goto_queue (state, stmt);
1854 break;
1856 case GIMPLE_SWITCH:
1857 verify_norecord_switch_expr (state, stmt);
1858 break;
1860 case GIMPLE_TRY:
1861 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
1862 replace = lower_try_finally (state, stmt);
1863 else
1865 x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
1866 switch (gimple_code (x))
1868 case GIMPLE_CATCH:
1869 replace = lower_catch (state, stmt);
1870 break;
1871 case GIMPLE_EH_FILTER:
1872 replace = lower_eh_filter (state, stmt);
1873 break;
1874 default:
1875 replace = lower_cleanup (state, stmt);
1876 break;
1880 /* Remove the old stmt and insert the transformed sequence
1881 instead. */
1882 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
1883 gsi_remove (gsi, true);
1885 /* Return since we don't want gsi_next () */
1886 return;
1888 default:
1889 /* A type, a decl, or some kind of statement that we're not
1890 interested in. Don't walk them. */
1891 break;
1894 gsi_next (gsi);
1897 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
1899 static void
1900 lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq)
1902 gimple_stmt_iterator gsi;
1903 for (gsi = gsi_start (seq); !gsi_end_p (gsi);)
1904 lower_eh_constructs_2 (state, &gsi);
1907 static unsigned int
1908 lower_eh_constructs (void)
1910 struct leh_state null_state;
1912 gimple_seq bodyp = gimple_body (current_function_decl);
1914 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1916 collect_finally_tree_1 (bodyp, NULL);
1918 memset (&null_state, 0, sizeof (null_state));
1919 lower_eh_constructs_1 (&null_state, bodyp);
1921 htab_delete (finally_tree);
1923 collect_eh_region_array ();
1924 return 0;
1927 struct gimple_opt_pass pass_lower_eh =
1930 GIMPLE_PASS,
1931 "eh", /* name */
1932 NULL, /* gate */
1933 lower_eh_constructs, /* execute */
1934 NULL, /* sub */
1935 NULL, /* next */
1936 0, /* static_pass_number */
1937 TV_TREE_EH, /* tv_id */
1938 PROP_gimple_lcf, /* properties_required */
1939 PROP_gimple_leh, /* properties_provided */
1940 0, /* properties_destroyed */
1941 0, /* todo_flags_start */
1942 TODO_dump_func /* todo_flags_finish */
1947 /* Construct EH edges for STMT. */
1949 static void
1950 make_eh_edge (struct eh_region_d *region, void *data)
1952 gimple stmt;
1953 tree lab;
1954 basic_block src, dst;
1956 stmt = (gimple) data;
1957 lab = get_eh_region_tree_label (region);
1959 src = gimple_bb (stmt);
1960 dst = label_to_block (lab);
1962 make_edge (src, dst, EDGE_EH);
1965 /* See if STMT is a call that might be inlined. */
1967 static bool
1968 inlinable_call_p (gimple stmt)
1970 tree decl;
1971 if (gimple_code (stmt) != GIMPLE_CALL)
1972 return false;
1973 if (cfun->after_inlining)
1974 return false;
1975 /* Indirect calls can be propagated to direct calls
1976 and inlined. */
1977 decl = gimple_call_fndecl (stmt);
1978 if (!decl)
1979 return true;
1980 if (cgraph_function_flags_ready
1981 && cgraph_function_body_availability (cgraph_node (decl))
1982 < AVAIL_OVERWRITABLE)
1983 return false;
1984 return !DECL_UNINLINABLE (decl);
1987 void
1988 make_eh_edges (gimple stmt)
1990 int region_nr;
1991 bool is_resx;
1992 bool inlinable = false;
1993 basic_block bb;
1995 if (gimple_code (stmt) == GIMPLE_RESX)
1997 region_nr = gimple_resx_region (stmt);
1998 is_resx = true;
2000 else
2002 region_nr = lookup_stmt_eh_region (stmt);
2003 if (region_nr < 0)
2004 return;
2005 is_resx = false;
2006 inlinable = inlinable_call_p (stmt);
2009 foreach_reachable_handler (region_nr, is_resx, inlinable, make_eh_edge, stmt);
2011 /* Make the CFG profile more consistent, assuming that the exception will resume
2012 in the first available EH handler. In practice this makes little difference,
2013 but we get fewer consistency errors in the dumps. */
2014 bb = gimple_bb (stmt);
2015 if (is_resx && EDGE_COUNT (bb->succs))
2016 EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE;
2019 /* Redirect EH edge E to NEW_BB. */
2021 edge
2022 redirect_eh_edge (edge e, basic_block new_bb)
2024 gimple stmt = gsi_stmt (gsi_last_bb (e->src));
2025 int region_nr, new_region_nr;
2026 bool is_resx;
2027 bool inlinable = false;
2028 tree label = gimple_block_label (new_bb);
2029 struct eh_region_d *r;
2031 if (gimple_code (stmt) == GIMPLE_RESX)
2033 region_nr = gimple_resx_region (stmt);
2034 is_resx = true;
2036 else
2038 region_nr = lookup_stmt_eh_region (stmt);
2039 gcc_assert (region_nr >= 0);
2040 is_resx = false;
2041 inlinable = inlinable_call_p (stmt);
2044 if (dump_file && (dump_flags & TDF_DETAILS))
2045 fprintf (dump_file, "Redirecting EH edge %i->%i to %i, region %i, resx %i\n",
2046 e->src->index, e->dest->index, new_bb->index, region_nr, is_resx);
2047 r = redirect_eh_edge_to_label (e, label, is_resx, inlinable, region_nr);
2048 new_region_nr = get_eh_region_number (r);
2049 if (new_region_nr != region_nr)
2051 if (is_resx)
2052 gimple_resx_set_region (stmt, new_region_nr);
2053 else
2055 remove_stmt_from_eh_region (stmt);
2056 add_stmt_to_eh_region (stmt, new_region_nr);
2059 e = ssa_redirect_edge (e, new_bb);
2060 return e;
2063 static bool mark_eh_edge_found_error;
2065 /* Mark the edge make_eh_edge would create for the given region by setting its
2066 aux field; output an error if something goes wrong. */
2068 static void
2069 mark_eh_edge (struct eh_region_d *region, void *data)
2071 gimple stmt;
2072 tree lab;
2073 basic_block src, dst;
2074 edge e;
2076 stmt = (gimple) data;
2077 lab = get_eh_region_tree_label (region);
2079 src = gimple_bb (stmt);
2080 dst = label_to_block (lab);
2082 e = find_edge (src, dst);
2083 if (!e)
2085 error ("EH edge %i->%i is missing", src->index, dst->index);
2086 mark_eh_edge_found_error = true;
2088 else if (!(e->flags & EDGE_EH))
2090 error ("EH edge %i->%i miss EH flag", src->index, dst->index);
2091 mark_eh_edge_found_error = true;
2093 else if (e->aux)
2095 /* ??? This might not be a mistake. */
2096 error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
2097 mark_eh_edge_found_error = true;
2099 else
2100 e->aux = (void *)1;
2103 /* Verify that the BB containing STMT as its last statement has precisely the
2104 edges that make_eh_edges would create. */
2106 bool
2107 verify_eh_edges (gimple stmt)
2109 int region_nr;
2110 bool is_resx;
2111 basic_block bb = gimple_bb (stmt);
2112 edge_iterator ei;
2113 edge e;
2114 bool inlinable = false;
2116 FOR_EACH_EDGE (e, ei, bb->succs)
2117 gcc_assert (!e->aux);
2118 mark_eh_edge_found_error = false;
2119 if (gimple_code (stmt) == GIMPLE_RESX)
2121 region_nr = gimple_resx_region (stmt);
2122 is_resx = true;
2124 else
2126 region_nr = lookup_stmt_eh_region (stmt);
2127 if (region_nr < 0)
2129 FOR_EACH_EDGE (e, ei, bb->succs)
2130 if (e->flags & EDGE_EH)
2132 error ("BB %i can not throw but has EH edges", bb->index);
2133 return true;
2135 return false;
2137 if (!stmt_could_throw_p (stmt))
2139 error ("BB %i last statement has incorrectly set region", bb->index);
2140 return true;
2142 inlinable = inlinable_call_p (stmt);
2143 is_resx = false;
2146 foreach_reachable_handler (region_nr, is_resx, inlinable, mark_eh_edge, stmt);
2147 FOR_EACH_EDGE (e, ei, bb->succs)
2149 if ((e->flags & EDGE_EH) && !e->aux)
2151 error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
2152 mark_eh_edge_found_error = true;
2153 return true;
2155 e->aux = NULL;
2158 return mark_eh_edge_found_error;
2162 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2164 bool
2165 operation_could_trap_helper_p (enum tree_code op,
2166 bool fp_operation,
2167 bool honor_trapv,
2168 bool honor_nans,
2169 bool honor_snans,
2170 tree divisor,
2171 bool *handled)
2173 *handled = true;
2174 switch (op)
2176 case TRUNC_DIV_EXPR:
2177 case CEIL_DIV_EXPR:
2178 case FLOOR_DIV_EXPR:
2179 case ROUND_DIV_EXPR:
2180 case EXACT_DIV_EXPR:
2181 case CEIL_MOD_EXPR:
2182 case FLOOR_MOD_EXPR:
2183 case ROUND_MOD_EXPR:
2184 case TRUNC_MOD_EXPR:
2185 case RDIV_EXPR:
2186 if (honor_snans || honor_trapv)
2187 return true;
2188 if (fp_operation)
2189 return flag_trapping_math;
2190 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2191 return true;
2192 return false;
2194 case LT_EXPR:
2195 case LE_EXPR:
2196 case GT_EXPR:
2197 case GE_EXPR:
2198 case LTGT_EXPR:
2199 /* Some floating point comparisons may trap. */
2200 return honor_nans;
2202 case EQ_EXPR:
2203 case NE_EXPR:
2204 case UNORDERED_EXPR:
2205 case ORDERED_EXPR:
2206 case UNLT_EXPR:
2207 case UNLE_EXPR:
2208 case UNGT_EXPR:
2209 case UNGE_EXPR:
2210 case UNEQ_EXPR:
2211 return honor_snans;
2213 case CONVERT_EXPR:
2214 case FIX_TRUNC_EXPR:
2215 /* Conversion of floating point might trap. */
2216 return honor_nans;
2218 case NEGATE_EXPR:
2219 case ABS_EXPR:
2220 case CONJ_EXPR:
2221 /* These operations don't trap with floating point. */
2222 if (honor_trapv)
2223 return true;
2224 return false;
2226 case PLUS_EXPR:
2227 case MINUS_EXPR:
2228 case MULT_EXPR:
2229 /* Any floating arithmetic may trap. */
2230 if (fp_operation && flag_trapping_math)
2231 return true;
2232 if (honor_trapv)
2233 return true;
2234 return false;
2236 default:
2237 /* Any floating arithmetic may trap. */
2238 if (fp_operation && flag_trapping_math)
2239 return true;
2241 *handled = false;
2242 return false;
2246 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2247 to floating-point values. HONOR_TRAPV is true if OP is applied to integer
2248 type operands that may trap. If OP is a division operator, DIVISOR contains
2249 the value of the divisor. */
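/* For example (illustrative, derived from the helper above): a TRUNC_DIV_EXPR
   whose divisor is not a nonzero constant is considered potentially trapping,
   while a PLUS_EXPR on ordinary (non -ftrapv) integer operands is not.  */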
2251 bool
2252 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2253 tree divisor)
2255 bool honor_nans = (fp_operation && flag_trapping_math
2256 && !flag_finite_math_only);
2257 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2258 bool handled;
2260 if (TREE_CODE_CLASS (op) != tcc_comparison
2261 && TREE_CODE_CLASS (op) != tcc_unary
2262 && TREE_CODE_CLASS (op) != tcc_binary)
2263 return false;
2265 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2266 honor_nans, honor_snans, divisor,
2267 &handled);
2270 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2271 location or floating point arithmetic. Cf. the RTL version, may_trap_p.
2272 This routine expects only GIMPLE lhs or rhs input. */
2274 bool
2275 tree_could_trap_p (tree expr)
2277 enum tree_code code;
2278 bool fp_operation = false;
2279 bool honor_trapv = false;
2280 tree t, base, div = NULL_TREE;
2282 if (!expr)
2283 return false;
2285 code = TREE_CODE (expr);
2286 t = TREE_TYPE (expr);
2288 if (t)
2290 if (COMPARISON_CLASS_P (expr))
2291 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2292 else
2293 fp_operation = FLOAT_TYPE_P (t);
2294 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2297 if (TREE_CODE_CLASS (code) == tcc_binary)
2298 div = TREE_OPERAND (expr, 1);
2299 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2300 return true;
2302 restart:
2303 switch (code)
2305 case TARGET_MEM_REF:
2306 /* For TARGET_MEM_REFs use the information based on the original
2307 reference. */
2308 expr = TMR_ORIGINAL (expr);
2309 code = TREE_CODE (expr);
2310 goto restart;
2312 case COMPONENT_REF:
2313 case REALPART_EXPR:
2314 case IMAGPART_EXPR:
2315 case BIT_FIELD_REF:
2316 case VIEW_CONVERT_EXPR:
2317 case WITH_SIZE_EXPR:
2318 expr = TREE_OPERAND (expr, 0);
2319 code = TREE_CODE (expr);
2320 goto restart;
2322 case ARRAY_RANGE_REF:
2323 base = TREE_OPERAND (expr, 0);
2324 if (tree_could_trap_p (base))
2325 return true;
2327 if (TREE_THIS_NOTRAP (expr))
2328 return false;
2330 return !range_in_array_bounds_p (expr);
2332 case ARRAY_REF:
2333 base = TREE_OPERAND (expr, 0);
2334 if (tree_could_trap_p (base))
2335 return true;
2337 if (TREE_THIS_NOTRAP (expr))
2338 return false;
2340 return !in_array_bounds_p (expr);
2342 case INDIRECT_REF:
2343 case ALIGN_INDIRECT_REF:
2344 case MISALIGNED_INDIRECT_REF:
2345 return !TREE_THIS_NOTRAP (expr);
2347 case ASM_EXPR:
2348 return TREE_THIS_VOLATILE (expr);
2351 case CALL_EXPR:
2352 t = get_callee_fndecl (expr);
2353 /* Assume that calls to weak functions may trap. */
2354 if (!t || !DECL_P (t) || DECL_WEAK (t))
2355 return true;
2356 return false;
2358 default:
2359 return false;
2364 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2365 an assignment or a conditional) may throw. */
2367 static bool
2368 stmt_could_throw_1_p (gimple stmt)
2370 enum tree_code code = gimple_expr_code (stmt);
2371 bool honor_nans = false;
2372 bool honor_snans = false;
2373 bool fp_operation = false;
2374 bool honor_trapv = false;
2375 tree t;
2376 size_t i;
2377 bool handled, ret;
2379 if (TREE_CODE_CLASS (code) == tcc_comparison
2380 || TREE_CODE_CLASS (code) == tcc_unary
2381 || TREE_CODE_CLASS (code) == tcc_binary)
2383 t = gimple_expr_type (stmt);
2384 fp_operation = FLOAT_TYPE_P (t);
2385 if (fp_operation)
2387 honor_nans = flag_trapping_math && !flag_finite_math_only;
2388 honor_snans = flag_signaling_nans != 0;
2390 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2391 honor_trapv = true;
2394 /* Check if the main expression may trap. */
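/* For assignment statements the second RHS operand is passed as the divisor;
   the helper only consults it for division and modulus codes.  */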
2395 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2396 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2397 honor_nans, honor_snans, t,
2398 &handled);
2399 if (handled)
2400 return ret;
2402 /* If the expression does not trap, see if any of the individual operands may
2403 trap. */
2404 for (i = 0; i < gimple_num_ops (stmt); i++)
2405 if (tree_could_trap_p (gimple_op (stmt, i)))
2406 return true;
2408 return false;
2412 /* Return true if statement STMT could throw an exception. */
2414 bool
2415 stmt_could_throw_p (gimple stmt)
2417 enum gimple_code code;
2419 if (!flag_exceptions)
2420 return false;
2422 /* The only statements that can throw an exception are assignments,
2423 conditionals, calls and asms. */
2424 code = gimple_code (stmt);
2425 if (code != GIMPLE_ASSIGN
2426 && code != GIMPLE_COND
2427 && code != GIMPLE_CALL
2428 && code != GIMPLE_ASM)
2429 return false;
2431 /* If exceptions can only be thrown by function calls and STMT is not a
2432 GIMPLE_CALL, the statement cannot throw. */
2433 if (!flag_non_call_exceptions && code != GIMPLE_CALL)
2434 return false;
2436 if (code == GIMPLE_ASSIGN || code == GIMPLE_COND)
2437 return stmt_could_throw_1_p (stmt);
2438 else if (is_gimple_call (stmt))
2439 return (gimple_call_flags (stmt) & ECF_NOTHROW) == 0;
2440 else if (gimple_code (stmt) == GIMPLE_ASM)
2441 return (gimple_asm_volatile_p (stmt));
2442 else
2443 gcc_unreachable ();
2445 return false;
2449 /* Return true if expression T could throw an exception. */
2451 bool
2452 tree_could_throw_p (tree t)
2454 if (!flag_exceptions)
2455 return false;
2456 if (TREE_CODE (t) == MODIFY_EXPR)
2458 if (flag_non_call_exceptions
2459 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2460 return true;
2461 t = TREE_OPERAND (t, 1);
2464 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2465 t = TREE_OPERAND (t, 0);
2466 if (TREE_CODE (t) == CALL_EXPR)
2467 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2468 if (flag_non_call_exceptions)
2469 return tree_could_trap_p (t);
2470 return false;
2473 /* Return true if STMT can throw an exception that is not caught within
2474 the current function (CFUN). */
2476 bool
2477 stmt_can_throw_external (gimple stmt)
2479 int region_nr;
2480 bool is_resx = false;
2481 bool inlinable_call = false;
2483 if (!stmt_could_throw_p (stmt))
2484 return false;
2486 if (gimple_code (stmt) == GIMPLE_RESX)
2488 region_nr = gimple_resx_region (stmt);
2489 is_resx = true;
2491 else
2492 region_nr = lookup_stmt_eh_region (stmt);
2494 if (region_nr < 0)
2495 return true;
2497 return can_throw_external_1 (region_nr, is_resx, inlinable_call);
2500 /* Return true if STMT can throw an exception that is caught within
2501 the current function (CFUN). */
2503 bool
2504 stmt_can_throw_internal (gimple stmt)
2506 int region_nr;
2507 bool is_resx = false;
2508 bool inlinable_call = false;
2510 if (gimple_code (stmt) == GIMPLE_RESX)
2512 region_nr = gimple_resx_region (stmt);
2513 is_resx = true;
2515 else
2517 region_nr = lookup_stmt_eh_region (stmt);
2518 inlinable_call = inlinable_call_p (stmt);
2521 if (region_nr < 0)
2522 return false;
2524 return can_throw_internal_1 (region_nr, is_resx, inlinable_call);
2528 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2529 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2530 in the table if it should be in there. Return TRUE if a replacement was
2531 done that may require an EH edge purge. */
2533 bool
2534 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2536 int region_nr = lookup_stmt_eh_region (old_stmt);
2538 if (region_nr >= 0)
2540 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2542 if (new_stmt == old_stmt && new_stmt_could_throw)
2543 return false;
2545 remove_stmt_from_eh_region (old_stmt);
2546 if (new_stmt_could_throw)
2548 add_stmt_to_eh_region (new_stmt, region_nr);
2549 return false;
2551 else
2552 return true;
2555 return false;
2558 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2559 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2560 this only handles handlers consisting of a single call, as that's the
2561 important case for C++: a destructor call for a particular object showing
2562 up in multiple handlers. */
2564 static bool
2565 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2567 gimple_stmt_iterator gsi;
2568 gimple ones, twos;
2569 unsigned int ai;
2571 gsi = gsi_start (oneh);
2572 if (!gsi_one_before_end_p (gsi))
2573 return false;
2574 ones = gsi_stmt (gsi);
2576 gsi = gsi_start (twoh);
2577 if (!gsi_one_before_end_p (gsi))
2578 return false;
2579 twos = gsi_stmt (gsi);
2581 if (!is_gimple_call (ones)
2582 || !is_gimple_call (twos)
2583 || gimple_call_lhs (ones)
2584 || gimple_call_lhs (twos)
2585 || gimple_call_chain (ones)
2586 || gimple_call_chain (twos)
2587 || !operand_equal_p (gimple_call_fn (ones), gimple_call_fn (twos), 0)
2588 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2589 return false;
2591 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2592 if (!operand_equal_p (gimple_call_arg (ones, ai),
2593 gimple_call_arg (twos, ai), 0))
2594 return false;
2596 return true;
2599 /* Optimize
2600 try { A() } finally { try { ~B() } catch { ~A() } }
2601 try { ... } finally { ~A() }
2602 into
2603 try { A() } catch { ~B() }
2604 try { ~B() ... } finally { ~A() }
2606 This occurs frequently in C++, where A is a local variable and B is a
2607 temporary used in the initializer for A. */
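/* An illustrative C++ fragment (not taken from any testcase) that might
   produce this shape:

     A a (make_b ());

   where the temporary returned by make_b must be destroyed while the cleanup
   guarding a's construction is still active, giving the nested try/finally
   pattern above.  */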
2609 static void
2610 optimize_double_finally (gimple one, gimple two)
2612 gimple oneh;
2613 gimple_stmt_iterator gsi;
2615 gsi = gsi_start (gimple_try_cleanup (one));
2616 if (!gsi_one_before_end_p (gsi))
2617 return;
2619 oneh = gsi_stmt (gsi);
2620 if (gimple_code (oneh) != GIMPLE_TRY
2621 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
2622 return;
2624 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2626 gimple_seq seq = gimple_try_eval (oneh);
2628 gimple_try_set_cleanup (one, seq);
2629 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
2630 seq = copy_gimple_seq_and_replace_locals (seq);
2631 gimple_seq_add_seq (&seq, gimple_try_eval (two));
2632 gimple_try_set_eval (two, seq);
2636 /* Perform EH refactoring optimizations that are simpler to do when code
2637 flow has been lowered but EH structures haven't. */
2639 static void
2640 refactor_eh_r (gimple_seq seq)
2642 gimple_stmt_iterator gsi;
2643 gimple one, two;
2645 one = NULL;
2646 two = NULL;
2647 gsi = gsi_start (seq);
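/* Walk the sequence keeping a window of two consecutive statements
   (ONE, TWO); try the double-finally optimization on each adjacent
   GIMPLE_TRY_FINALLY pair and recurse into the sub-sequences of ONE.  */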
2648 while (1)
2650 one = two;
2651 if (gsi_end_p (gsi))
2652 two = NULL;
2653 else
2654 two = gsi_stmt (gsi);
2655 if (one
2656 && two
2657 && gimple_code (one) == GIMPLE_TRY
2658 && gimple_code (two) == GIMPLE_TRY
2659 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2660 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
2661 optimize_double_finally (one, two);
2662 if (one)
2663 switch (gimple_code (one))
2665 case GIMPLE_TRY:
2666 refactor_eh_r (gimple_try_eval (one));
2667 refactor_eh_r (gimple_try_cleanup (one));
2668 break;
2669 case GIMPLE_CATCH:
2670 refactor_eh_r (gimple_catch_handler (one));
2671 break;
2672 case GIMPLE_EH_FILTER:
2673 refactor_eh_r (gimple_eh_filter_failure (one));
2674 break;
2675 default:
2676 break;
2678 if (two)
2679 gsi_next (&gsi);
2680 else
2681 break;
2685 static unsigned
2686 refactor_eh (void)
2688 refactor_eh_r (gimple_body (current_function_decl));
2689 return 0;
2692 struct gimple_opt_pass pass_refactor_eh =
2695 GIMPLE_PASS,
2696 "ehopt", /* name */
2697 NULL, /* gate */
2698 refactor_eh, /* execute */
2699 NULL, /* sub */
2700 NULL, /* next */
2701 0, /* static_pass_number */
2702 TV_TREE_EH, /* tv_id */
2703 PROP_gimple_lcf, /* properties_required */
2704 0, /* properties_provided */
2705 0, /* properties_destroyed */
2706 0, /* todo_flags_start */
2707 TODO_dump_func /* todo_flags_finish */
2711 /* Walk statements, see which regions are really referenced, and remove the unreachable ones. */
2713 static void
2714 tree_remove_unreachable_handlers (void)
2716 sbitmap reachable, contains_stmt;
2717 VEC(int,heap) * label_to_region;
2718 basic_block bb;
2720 label_to_region = label_to_region_map ();
2721 reachable = sbitmap_alloc (num_eh_regions ());
2722 sbitmap_zero (reachable);
2723 contains_stmt = sbitmap_alloc (num_eh_regions ());
2724 sbitmap_zero (contains_stmt);
2726 FOR_EACH_BB (bb)
2728 gimple_stmt_iterator gsi;
2729 int region;
2730 bool has_eh_preds = bb_has_eh_pred (bb);
2732 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2734 gimple stmt = gsi_stmt (gsi);
2736 if (gimple_code (stmt) == GIMPLE_LABEL && has_eh_preds)
2738 int uid = LABEL_DECL_UID (gimple_label_label (stmt));
2739 int region;
2741 for (region = VEC_index (int, label_to_region, uid);
2742 region; region = get_next_region_sharing_label (region))
2743 SET_BIT (reachable, region);
2745 if (gimple_code (stmt) == GIMPLE_RESX)
2746 SET_BIT (reachable,
2747 VEC_index (eh_region, cfun->eh->region_array,
2748 gimple_resx_region (stmt))->region_number);
2749 if ((region = lookup_stmt_eh_region (stmt)) >= 0)
2750 SET_BIT (contains_stmt, region);
2754 if (dump_file)
2756 fprintf (dump_file, "Before removal of unreachable regions:\n");
2757 dump_eh_tree (dump_file, cfun);
2758 fprintf (dump_file, "Reachable regions: ");
2759 dump_sbitmap_file (dump_file, reachable);
2760 fprintf (dump_file, "Regions containing insns: ");
2761 dump_sbitmap_file (dump_file, contains_stmt);
2764 remove_unreachable_regions (reachable, contains_stmt);
2765 sbitmap_free (reachable);
2766 sbitmap_free (contains_stmt);
2767 VEC_free (int, heap, label_to_region);
2768 if (dump_file)
2770 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
2771 dump_eh_tree (dump_file, cfun);
2772 fprintf (dump_file, "\n\n");
2776 /* Pattern match an empty EH receiver looking like:
2778 save_filt.6352_662 = [filter_expr] <<<filter object>>>;
2779 save_eptr.6351_663 = [exc_ptr_expr] <<<exception object>>>;
2780 <<<exception object>>> = save_eptr.6351_663;
2781 <<<filter object>>> = save_filt.6352_662;
2782 resx 1
2784 And various minor variants after DCE or copy propagation.
2787 static int
2788 tree_empty_eh_handler_p (basic_block bb)
2790 gimple_stmt_iterator gsi;
2791 int region;
2792 edge_iterator ei;
2793 edge e;
2794 use_operand_p imm_use;
2795 gimple use_stmt;
2796 bool found = false;
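/* Walk backwards from the last statement of the block, matching the
   pattern's statements in reverse order.  */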
2798 gsi = gsi_last_bb (bb);
2800 /* RESX */
2801 if (gsi_end_p (gsi))
2802 return 0;
2803 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_RESX)
2804 return 0;
2805 region = gimple_resx_region (gsi_stmt (gsi));
2807 /* filter_object set. */
2808 gsi_prev (&gsi);
2809 if (gsi_end_p (gsi))
2810 return 0;
2811 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_ASSIGN)
2813 tree filter_tmp;
2814 tree exc_ptr_tmp;
2816 if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != FILTER_EXPR)
2817 return 0;
2818 filter_tmp = gimple_assign_rhs1 (gsi_stmt (gsi));
2820 /* exc_ptr set. */
2821 gsi_prev (&gsi);
2822 if (gsi_end_p (gsi))
2823 return 0;
2824 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
2825 return 0;
2826 if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != EXC_PTR_EXPR)
2827 return 0;
2828 exc_ptr_tmp = gimple_assign_rhs1 (gsi_stmt (gsi));
2830 /* exc_ptr get. */
2831 if (TREE_CODE (exc_ptr_tmp) != EXC_PTR_EXPR)
2833 gsi_prev (&gsi);
2834 if (gsi_end_p (gsi))
2835 return 0;
2836 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
2837 return 0;
2838 if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != EXC_PTR_EXPR)
2839 return 0;
2840 if (exc_ptr_tmp != gimple_assign_lhs (gsi_stmt (gsi)))
2841 return 0;
2842 if (!single_imm_use (exc_ptr_tmp, &imm_use, &use_stmt))
2843 return 0;
2846 /* filter_object get. */
2847 if (TREE_CODE (filter_tmp) != FILTER_EXPR)
2849 gsi_prev (&gsi);
2850 if (gsi_end_p (gsi))
2851 return 0;
2852 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
2853 return 0;
2854 if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != FILTER_EXPR)
2855 return 0;
2856 if (filter_tmp != gimple_assign_lhs (gsi_stmt (gsi)))
2857 return 0;
2858 if (!single_imm_use (filter_tmp, &imm_use, &use_stmt))
2859 return 0;
2862 /* label. */
2863 gsi_prev (&gsi);
2864 if (gsi_end_p (gsi))
2865 return 0;
2867 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2868 return 0;
2870 /* Be sure that there is at least one EH region reaching the block directly.
2871 After EH edge redirection, it is possible that the block is reached by one
2872 handler but resumed by a different one. */
2873 FOR_EACH_EDGE (e, ei, bb->preds)
2874 if ((e->flags & EDGE_EH))
2875 found = true;
2876 if (found)
2877 return region;
2878 return 0;
2881 /* Return true if it is possible to remove basic block BB and propagate
2882 through PHIs.
2884 This means that every PHI in BB has all uses such that they are PHIs
2885 of basic blocks reachable through BB and they appear only in uses
2886 reachable by the edge from BB to the block containing the use.
2888 This is the same as in the merge-phi code, but in a slightly more general
2889 setting because BB can have multiple successors. */
2891 static bool
2892 all_phis_safe_to_merge (basic_block bb)
2894 gimple_stmt_iterator si;
2895 bool ok = true;
2897 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2899 gimple phi = gsi_stmt (si);
2900 tree result = gimple_phi_result (phi);
2901 gimple stmt;
2902 use_operand_p imm_use;
2903 imm_use_iterator imm_iter;
2905 /* If the PHI's result is never used, then we can just
2906 ignore it. */
2907 if (has_zero_uses (result))
2908 continue;
2909 /* We can always rebuild virtuals if needed. */
2910 if (!is_gimple_reg (result))
2911 continue;
2912 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, result)
2914 if (gimple_code (stmt) != GIMPLE_PHI)
2916 if (dump_file && (dump_flags & TDF_DETAILS))
2917 fprintf (dump_file,
2918 "PHI result has use in non-PHI statement.\n");
2919 ok = false;
2920 BREAK_FROM_IMM_USE_STMT (imm_iter);
2922 else
2923 FOR_EACH_IMM_USE_ON_STMT (imm_use, imm_iter)
2925 edge e;
2926 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (imm_use));
2927 if (e->src != bb)
2929 if (dump_file && (dump_flags & TDF_DETAILS))
2930 fprintf (dump_file, "PHI has use in PHI not reached from "
2931 "empty cleanup itself.\n");
2932 ok = false;
2933 break;
2936 if (!ok)
2937 BREAK_FROM_IMM_USE_STMT (imm_iter);
2939 if (!ok)
2940 return false;
2942 return ok;
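/* Set when the EH-edge updates below add or remove edges in a way that may
   invalidate dominance information; cleanup_eh then frees the dominator
   trees so they get recomputed.  */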
2945 static bool dominance_info_invalidated;
2947 /* Information to pass into make_eh_edge_and_update_phi. */
2949 struct update_info
2951 basic_block bb_to_remove, bb;
2952 edge edge_to_remove;
2955 /* DATA points to an update_info structure.
2956 Like make_eh_edge, create an EH edge from DATA->bb to the basic block
2957 containing the handler of REGION. In addition, update PHI operands by
2958 copying operands from DATA->bb_to_remove. */
2960 static void
2961 make_eh_edge_and_update_phi (struct eh_region_d *region, void *data)
2963 struct update_info *info = (struct update_info *) data;
2964 edge e, e2;
2965 tree lab;
2966 basic_block src, dst;
2967 gimple_stmt_iterator si;
2969 lab = get_eh_region_tree_label (region);
2971 src = info->bb;
2972 dst = label_to_block (lab);
2974 e = find_edge (src, dst);
2975 if (e)
2977 gcc_assert (e->flags & EDGE_EH);
2978 e->aux = e;
2979 return;
2981 dominance_info_invalidated = true;
2982 e2 = find_edge (info->bb_to_remove, dst);
2983 e = make_edge (src, dst, EDGE_EH);
2984 e->aux = e;
2985 gcc_assert (e2);
2986 for (si = gsi_start_phis (dst); !gsi_end_p (si); gsi_next (&si))
2988 gimple phi = gsi_stmt (si);
2989 tree use = USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e2));
2990 gimple def = (TREE_CODE (use) == SSA_NAME
2991 ? SSA_NAME_DEF_STMT (use) : NULL);
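/* If the value flowing in over the edge from the block being removed is
   itself defined in that block (necessarily by a PHI for SSA registers),
   look through it to the value arriving there over EDGE_TO_REMOVE.  */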
2993 if (def && gimple_bb (def) == info->bb_to_remove)
2995 use = USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (def,
2996 info->edge_to_remove));
2997 gcc_assert (info->bb_to_remove == info->edge_to_remove->dest);
2998 def = TREE_CODE (use) == SSA_NAME ? SSA_NAME_DEF_STMT (use) : NULL;
2999 gcc_assert (!def
3000 || gimple_bb (def) != info->bb_to_remove
3001 || !is_gimple_reg (use));
3003 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), use);
3007 /* Make EH edges corresponding to STMT while updating PHI nodes after removal
3008 of the empty cleanup BB_TO_REMOVE joined to the BB containing STMT
3009 by EDGE_TO_REMOVE.
3011 Return true if EDGE_TO_REMOVE was really removed. It might stay reachable when
3012 not all EH regions are cleaned up. */
3014 static bool
3015 update_eh_edges (gimple stmt, basic_block bb_to_remove, edge edge_to_remove)
3017 int region_nr;
3018 bool is_resx;
3019 bool inlinable = false;
3020 struct update_info info;
3021 edge_iterator ei;
3022 edge e;
3023 int probability_sum = 0;
3024 bool removed = false;
3026 info.bb_to_remove = bb_to_remove;
3027 info.bb = gimple_bb (stmt);
3028 info.edge_to_remove = edge_to_remove;
3030 if (gimple_code (stmt) == GIMPLE_RESX)
3032 region_nr = gimple_resx_region (stmt);
3033 is_resx = true;
3035 else
3037 region_nr = lookup_stmt_eh_region (stmt);
3038 is_resx = false;
3039 inlinable = inlinable_call_p (stmt);
3042 /* First add new edges as necessary. */
3043 foreach_reachable_handler (region_nr, is_resx, inlinable,
3044 make_eh_edge_and_update_phi, &info);
3046 /* And remove the edges we didn't mark. */
3047 for (ei = ei_start (info.bb->succs); (e = ei_safe_edge (ei)); )
3049 if ((e->flags & EDGE_EH) && !e->aux)
3051 dominance_info_invalidated = true;
3052 if (e == edge_to_remove)
3053 removed = true;
3054 remove_edge (e);
3056 else
3058 e->aux = NULL;
3059 probability_sum += e->probability;
3060 ei_next (&ei);
3064 /* Make the CFG profile more consistent, assuming that the exception will resume
3065 in the first available EH handler. In practice this makes little difference,
3066 but we get fewer consistency errors in the dumps. */
3067 if (is_resx && EDGE_COUNT (info.bb->succs) && !probability_sum)
3068 EDGE_SUCC (info.bb, 0)->probability = REG_BR_PROB_BASE;
3069 return removed;
3072 /* Look for basic blocks containing an empty exception handler and remove them.
3073 This is similar to jump forwarding, just across EH edges. */
3075 static bool
3076 cleanup_empty_eh (basic_block bb, VEC(int,heap) * label_to_region)
3078 int region;
3079 gimple_stmt_iterator si;
3080 edge_iterator ei;
3082 /* When the handler of an EH region winds up being empty, we can safely
3083 remove it. This causes inner EH regions to be redirected
3084 to the outer one, if present in the function. So we need to rebuild
3085 EH edges in all sources. */
3086 if ((region = tree_empty_eh_handler_p (bb))
3087 && all_phis_safe_to_merge (bb))
3089 edge e;
3090 bool found = false, removed_some = false, has_non_eh_preds = false;
3091 gimple_stmt_iterator gsi;
3093 /* Look for all EH regions sharing the label of this block.
3094 If they are not the same as REGION, remove them and replace them
3095 by the outer region of REGION. Also note whether REGION itself is one
3096 of them. */
3098 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3099 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
3101 int uid = LABEL_DECL_UID (gimple_label_label (gsi_stmt (gsi)));
3102 int r = VEC_index (int, label_to_region, uid);
3103 int next;
3105 while (r)
3107 next = get_next_region_sharing_label (r);
3108 if (r == region)
3109 found = true;
3110 else
3112 removed_some = true;
3113 remove_eh_region_and_replace_by_outer_of (r, region);
3114 if (dump_file && (dump_flags & TDF_DETAILS))
3115 fprintf (dump_file, "Empty EH handler %i removed and "
3116 "replaced by %i\n", r, region);
3118 r = next;
3121 else
3122 break;
3124 gcc_assert (found || removed_some);
3125 FOR_EACH_EDGE (e, ei, bb->preds)
3126 if (!(e->flags & EDGE_EH))
3127 has_non_eh_preds = true;
3129 /* When the block is an empty EH cleanup, but it is reachable via non-EH code too,
3130 we cannot remove the region it is resumed via, because doing so would
3131 lead to redirection of its RESX edges.
3133 This case will be handled later after edge forwarding if the EH cleanup
3134 is really dead. */
3136 if (found && !has_non_eh_preds)
3138 if (dump_file && (dump_flags & TDF_DETAILS))
3139 fprintf (dump_file, "Empty EH handler %i removed.\n", region);
3140 remove_eh_region (region);
3142 else if (!removed_some)
3143 return false;
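/* Rebuild or drop the incoming EH edges: predecessors whose last statement
   can still throw internally get their EH edges recomputed (possibly
   retargeted to an outer handler); other EH edges are simply removed.  */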
3145 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3147 basic_block src = e->src;
3148 if (!(e->flags & EDGE_EH))
3150 ei_next (&ei);
3151 continue;
3153 if (stmt_can_throw_internal (last_stmt (src)))
3155 if (!update_eh_edges (last_stmt (src), bb, e))
3156 ei_next (&ei);
3158 else
3159 remove_edge (e);
3162 /* Verify that we eliminated all uses of the PHIs we are going to remove.
3163 If we didn't, rebuild SSA for the affected variable (this is allowed only
3164 for virtuals). */
3165 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
3167 gimple phi = gsi_stmt (si);
3168 tree result = gimple_phi_result (phi);
3169 if (!has_zero_uses (result))
3171 use_operand_p use_p;
3172 imm_use_iterator iter;
3173 gimple stmt;
3175 FOR_EACH_IMM_USE_STMT (stmt, iter, result)
3177 /* We have a use; see if it won't disappear after
3178 removing BB. */
3179 if (gimple_bb (stmt) == bb)
3180 continue;
3181 if (gimple_code (stmt) == GIMPLE_PHI)
3183 bool bad = false;
3185 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3186 if (gimple_phi_arg_edge (stmt,
3187 PHI_ARG_INDEX_FROM_USE (use_p))->src != bb)
3189 bad = true;
3190 break;
3193 if (!bad)
3194 continue;
3197 gcc_assert (!is_gimple_reg (result));
3198 mark_sym_for_renaming (SSA_NAME_VAR (result));
3199 /* As we are going to delete this block we will release all
3200 defs which makes the immediate uses on use stmts invalid.
3201 Avoid that by replacing all uses with the bare variable
3202 and updating the stmts. */
3203 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3204 SET_USE (use_p, SSA_NAME_VAR (result));
3205 update_stmt (stmt);
3209 if (!ei_safe_edge (ei_start (bb->preds)))
3210 delete_basic_block (bb);
3211 return true;
3213 return false;
3217 /* Perform cleanups and lowering of exception handling:
3218 1) cleanup regions with handlers doing nothing are optimized out
3219 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
3220 3) info about regions that contain instructions, and regions
3221 reachable via local EH edges, is collected
3222 4) the EH tree is pruned of regions that are no longer necessary.
3225 static unsigned int
3226 cleanup_eh (void)
3228 bool changed = false;
3229 basic_block bb;
3230 VEC(int,heap) * label_to_region;
3231 int i;
3233 if (!cfun->eh)
3234 return 0;
3235 if (dump_file)
3237 fprintf (dump_file, "Before cleanups:\n");
3238 dump_eh_tree (dump_file, cfun);
3241 if (optimize)
3243 label_to_region = label_to_region_map ();
3244 dominance_info_invalidated = false;
3245 /* We cannot use FOR_EACH_BB, since the basic blocks may get removed. */
3246 for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
3248 bb = BASIC_BLOCK (i);
3249 if (bb)
3250 changed |= cleanup_empty_eh (bb, label_to_region);
3252 VEC_free (int, heap, label_to_region);
3253 if (dominance_info_invalidated)
3255 free_dominance_info (CDI_DOMINATORS);
3256 free_dominance_info (CDI_POST_DOMINATORS);
3259 /* Removing a contained cleanup can render MUST_NOT_THROW regions empty. */
3260 if (changed)
3261 delete_unreachable_blocks ();
3264 tree_remove_unreachable_handlers ();
3265 if (dump_file)
3267 fprintf (dump_file, "After cleanups:\n");
3268 dump_eh_tree (dump_file, cfun);
3271 return (changed ? TODO_cleanup_cfg | TODO_update_ssa : 0);
3274 struct gimple_opt_pass pass_cleanup_eh = {
3276 GIMPLE_PASS,
3277 "ehcleanup", /* name */
3278 NULL, /* gate */
3279 cleanup_eh, /* execute */
3280 NULL, /* sub */
3281 NULL, /* next */
3282 0, /* static_pass_number */
3283 TV_TREE_EH, /* tv_id */
3284 PROP_gimple_lcf, /* properties_required */
3285 0, /* properties_provided */
3286 0, /* properties_destroyed */
3287 0, /* todo_flags_start */
3288 TODO_dump_func /* todo_flags_finish */