Daily bump.
[official-gcc.git] / gcc / tree-eh.c
blob3eff07fc8feb23d215e2e9aa1d7d274aef309db3
1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "cfghooks.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "cgraph.h"
31 #include "diagnostic-core.h"
32 #include "fold-const.h"
33 #include "calls.h"
34 #include "except.h"
35 #include "cfganal.h"
36 #include "cfgcleanup.h"
37 #include "tree-eh.h"
38 #include "gimple-iterator.h"
39 #include "tree-cfg.h"
40 #include "tree-into-ssa.h"
41 #include "tree-ssa.h"
42 #include "tree-inline.h"
43 #include "langhooks.h"
44 #include "cfgloop.h"
45 #include "gimple-low.h"
46 #include "stringpool.h"
47 #include "attribs.h"
48 #include "asan.h"
49 #include "gimplify.h"
51 /* In some instances a tree and a gimple need to be stored in a same table,
52 i.e. in hash tables. This is a structure to do this. */
53 typedef union {tree *tp; tree t; gimple *g;} treemple;
55 /* Misc functions used in this file. */
57 /* Remember and lookup EH landing pad data for arbitrary statements.
58 Really this means any statement that could_throw_p. We could
59 stuff this information into the stmt_ann data structure, but:
61 (1) We absolutely rely on this information being kept until
62 we get to rtl. Once we're done with lowering here, if we lose
63 the information there's no way to recover it!
65 (2) There are many more statements that *cannot* throw as
66 compared to those that can. We should be saving some amount
67 of space by only allocating memory for those that can throw. */
69 /* Add statement T in function IFUN to landing pad NUM. */
71 static void
72 add_stmt_to_eh_lp_fn (struct function *ifun, gimple *t, int num)
74 gcc_assert (num != 0);
76 if (!get_eh_throw_stmt_table (ifun))
77 set_eh_throw_stmt_table (ifun, hash_map<gimple *, int>::create_ggc (31));
79 gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num));
82 /* Add statement T in the current function (cfun) to EH landing pad NUM. */
84 void
85 add_stmt_to_eh_lp (gimple *t, int num)
87 add_stmt_to_eh_lp_fn (cfun, t, num);
90 /* Add statement T to the single EH landing pad in REGION. */
92 static void
93 record_stmt_eh_region (eh_region region, gimple *t)
95 if (region == NULL)
96 return;
97 if (region->type == ERT_MUST_NOT_THROW)
98 add_stmt_to_eh_lp_fn (cfun, t, -region->index);
99 else
101 eh_landing_pad lp = region->landing_pads;
102 if (lp == NULL)
103 lp = gen_eh_landing_pad (region);
104 else
105 gcc_assert (lp->next_lp == NULL);
106 add_stmt_to_eh_lp_fn (cfun, t, lp->index);
111 /* Remove statement T in function IFUN from its EH landing pad. */
113 bool
114 remove_stmt_from_eh_lp_fn (struct function *ifun, gimple *t)
116 if (!get_eh_throw_stmt_table (ifun))
117 return false;
119 if (!get_eh_throw_stmt_table (ifun)->get (t))
120 return false;
122 get_eh_throw_stmt_table (ifun)->remove (t);
123 return true;
127 /* Remove statement T in the current function (cfun) from its
128 EH landing pad. */
130 bool
131 remove_stmt_from_eh_lp (gimple *t)
133 return remove_stmt_from_eh_lp_fn (cfun, t);
136 /* Determine if statement T is inside an EH region in function IFUN.
137 Positive numbers indicate a landing pad index; negative numbers
138 indicate a MUST_NOT_THROW region index; zero indicates that the
139 statement is not recorded in the region table. */
142 lookup_stmt_eh_lp_fn (struct function *ifun, const gimple *t)
144 if (ifun->eh->throw_stmt_table == NULL)
145 return 0;
147 int *lp_nr = ifun->eh->throw_stmt_table->get (const_cast <gimple *> (t));
148 return lp_nr ? *lp_nr : 0;
151 /* Likewise, but always use the current function. */
154 lookup_stmt_eh_lp (const gimple *t)
156 /* We can get called from initialized data when -fnon-call-exceptions
157 is on; prevent crash. */
158 if (!cfun)
159 return 0;
160 return lookup_stmt_eh_lp_fn (cfun, t);
163 /* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
164 nodes and LABEL_DECL nodes. We will use this during the second phase to
165 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
167 struct finally_tree_node
169 /* When storing a GIMPLE_TRY, we have to record a gimple. However
170 when deciding whether a GOTO to a certain LABEL_DECL (which is a
171 tree) leaves the TRY block, its necessary to record a tree in
172 this field. Thus a treemple is used. */
173 treemple child;
174 gtry *parent;
177 /* Hashtable helpers. */
179 struct finally_tree_hasher : free_ptr_hash <finally_tree_node>
181 static inline hashval_t hash (const finally_tree_node *);
182 static inline bool equal (const finally_tree_node *,
183 const finally_tree_node *);
186 inline hashval_t
187 finally_tree_hasher::hash (const finally_tree_node *v)
189 return (intptr_t)v->child.t >> 4;
192 inline bool
193 finally_tree_hasher::equal (const finally_tree_node *v,
194 const finally_tree_node *c)
196 return v->child.t == c->child.t;
199 /* Note that this table is *not* marked GTY. It is short-lived. */
200 static hash_table<finally_tree_hasher> *finally_tree;
202 static void
203 record_in_finally_tree (treemple child, gtry *parent)
205 struct finally_tree_node *n;
206 finally_tree_node **slot;
208 n = XNEW (struct finally_tree_node);
209 n->child = child;
210 n->parent = parent;
212 slot = finally_tree->find_slot (n, INSERT);
213 gcc_assert (!*slot);
214 *slot = n;
217 static void
218 collect_finally_tree (gimple *stmt, gtry *region);
220 /* Go through the gimple sequence. Works with collect_finally_tree to
221 record all GIMPLE_LABEL and GIMPLE_TRY statements. */
223 static void
224 collect_finally_tree_1 (gimple_seq seq, gtry *region)
226 gimple_stmt_iterator gsi;
228 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
229 collect_finally_tree (gsi_stmt (gsi), region);
232 static void
233 collect_finally_tree (gimple *stmt, gtry *region)
235 treemple temp;
237 switch (gimple_code (stmt))
239 case GIMPLE_LABEL:
240 temp.t = gimple_label_label (as_a <glabel *> (stmt));
241 record_in_finally_tree (temp, region);
242 break;
244 case GIMPLE_TRY:
245 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
247 temp.g = stmt;
248 record_in_finally_tree (temp, region);
249 collect_finally_tree_1 (gimple_try_eval (stmt),
250 as_a <gtry *> (stmt));
251 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
253 else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
255 collect_finally_tree_1 (gimple_try_eval (stmt), region);
256 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
258 break;
260 case GIMPLE_CATCH:
261 collect_finally_tree_1 (gimple_catch_handler (
262 as_a <gcatch *> (stmt)),
263 region);
264 break;
266 case GIMPLE_EH_FILTER:
267 collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
268 break;
270 case GIMPLE_EH_ELSE:
272 geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
273 collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
274 collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
276 break;
278 default:
279 /* A type, a decl, or some kind of statement that we're not
280 interested in. Don't walk them. */
281 break;
286 /* Use the finally tree to determine if a jump from START to TARGET
287 would leave the try_finally node that START lives in. */
289 static bool
290 outside_finally_tree (treemple start, gimple *target)
292 struct finally_tree_node n, *p;
296 n.child = start;
297 p = finally_tree->find (&n);
298 if (!p)
299 return true;
300 start.g = p->parent;
302 while (start.g != target);
304 return false;
307 /* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY
308 nodes into a set of gotos, magic labels, and eh regions.
309 The eh region creation is straight-forward, but frobbing all the gotos
310 and such into shape isn't. */
312 /* The sequence into which we record all EH stuff. This will be
313 placed at the end of the function when we're all done. */
314 static gimple_seq eh_seq;
316 /* Record whether an EH region contains something that can throw,
317 indexed by EH region number. */
318 static bitmap eh_region_may_contain_throw_map;
320 /* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
321 statements that are seen to escape this GIMPLE_TRY_FINALLY node.
322 The idea is to record a gimple statement for everything except for
323 the conditionals, which get their labels recorded. Since labels are
324 of type 'tree', we need this node to store both gimple and tree
325 objects. REPL_STMT is the sequence used to replace the goto/return
326 statement. CONT_STMT is used to store the statement that allows
327 the return/goto to jump to the original destination. */
329 struct goto_queue_node
331 treemple stmt;
332 location_t location;
333 gimple_seq repl_stmt;
334 gimple *cont_stmt;
335 int index;
336 /* This is used when index >= 0 to indicate that stmt is a label (as
337 opposed to a goto stmt). */
338 int is_label;
341 /* State of the world while lowering. */
343 struct leh_state
345 /* What's "current" while constructing the eh region tree. These
346 correspond to variables of the same name in cfun->eh, which we
347 don't have easy access to. */
348 eh_region cur_region;
350 /* What's "current" for the purposes of __builtin_eh_pointer. For
351 a CATCH, this is the associated TRY. For an EH_FILTER, this is
352 the associated ALLOWED_EXCEPTIONS, etc. */
353 eh_region ehp_region;
355 /* Processing of TRY_FINALLY requires a bit more state. This is
356 split out into a separate structure so that we don't have to
357 copy so much when processing other nodes. */
358 struct leh_tf_state *tf;
360 /* Outer non-clean up region. */
361 eh_region outer_non_cleanup;
364 struct leh_tf_state
366 /* Pointer to the GIMPLE_TRY_FINALLY node under discussion. The
367 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain
368 this so that outside_finally_tree can reliably reference the tree used
369 in the collect_finally_tree data structures. */
370 gtry *try_finally_expr;
371 gtry *top_p;
373 /* While lowering a top_p usually it is expanded into multiple statements,
374 thus we need the following field to store them. */
375 gimple_seq top_p_seq;
377 /* The state outside this try_finally node. */
378 struct leh_state *outer;
380 /* The exception region created for it. */
381 eh_region region;
383 /* The goto queue. */
384 struct goto_queue_node *goto_queue;
385 size_t goto_queue_size;
386 size_t goto_queue_active;
388 /* Pointer map to help in searching goto_queue when it is large. */
389 hash_map<gimple *, goto_queue_node *> *goto_queue_map;
391 /* The set of unique labels seen as entries in the goto queue. */
392 vec<tree> dest_array;
394 /* A label to be added at the end of the completed transformed
395 sequence. It will be set if may_fallthru was true *at one time*,
396 though subsequent transformations may have cleared that flag. */
397 tree fallthru_label;
399 /* True if it is possible to fall out the bottom of the try block.
400 Cleared if the fallthru is converted to a goto. */
401 bool may_fallthru;
403 /* True if any entry in goto_queue is a GIMPLE_RETURN. */
404 bool may_return;
406 /* True if the finally block can receive an exception edge.
407 Cleared if the exception case is handled by code duplication. */
408 bool may_throw;
411 static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *);
413 /* Search for STMT in the goto queue. Return the replacement,
414 or null if the statement isn't in the queue. */
416 #define LARGE_GOTO_QUEUE 20
418 static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);
420 static gimple_seq
421 find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
423 unsigned int i;
425 if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
427 for (i = 0; i < tf->goto_queue_active; i++)
428 if ( tf->goto_queue[i].stmt.g == stmt.g)
429 return tf->goto_queue[i].repl_stmt;
430 return NULL;
433 /* If we have a large number of entries in the goto_queue, create a
434 pointer map and use that for searching. */
436 if (!tf->goto_queue_map)
438 tf->goto_queue_map = new hash_map<gimple *, goto_queue_node *>;
439 for (i = 0; i < tf->goto_queue_active; i++)
441 bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g,
442 &tf->goto_queue[i]);
443 gcc_assert (!existed);
447 goto_queue_node **slot = tf->goto_queue_map->get (stmt.g);
448 if (slot != NULL)
449 return ((*slot)->repl_stmt);
451 return NULL;
454 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
455 lowered GIMPLE_COND. If, by chance, the replacement is a simple goto,
456 then we can just splat it in, otherwise we add the new stmts immediately
457 after the GIMPLE_COND and redirect. */
459 static void
460 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
461 gimple_stmt_iterator *gsi)
463 tree label;
464 gimple_seq new_seq;
465 treemple temp;
466 location_t loc = gimple_location (gsi_stmt (*gsi));
468 temp.tp = tp;
469 new_seq = find_goto_replacement (tf, temp);
470 if (!new_seq)
471 return;
473 if (gimple_seq_singleton_p (new_seq)
474 && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
476 *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
477 return;
480 label = create_artificial_label (loc);
481 /* Set the new label for the GIMPLE_COND */
482 *tp = label;
484 gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
485 gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
488 /* The real work of replace_goto_queue. Returns with TSI updated to
489 point to the next statement. */
491 static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);
493 static void
494 replace_goto_queue_1 (gimple *stmt, struct leh_tf_state *tf,
495 gimple_stmt_iterator *gsi)
497 gimple_seq seq;
498 treemple temp;
499 temp.g = NULL;
501 switch (gimple_code (stmt))
503 case GIMPLE_GOTO:
504 case GIMPLE_RETURN:
505 temp.g = stmt;
506 seq = find_goto_replacement (tf, temp);
507 if (seq)
509 gimple_stmt_iterator i;
510 seq = gimple_seq_copy (seq);
511 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
512 gimple_set_location (gsi_stmt (i), gimple_location (stmt));
513 gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
514 gsi_remove (gsi, false);
515 return;
517 break;
519 case GIMPLE_COND:
520 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
521 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
522 break;
524 case GIMPLE_TRY:
525 replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
526 replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
527 break;
528 case GIMPLE_CATCH:
529 replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
530 as_a <gcatch *> (stmt)),
531 tf);
532 break;
533 case GIMPLE_EH_FILTER:
534 replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
535 break;
536 case GIMPLE_EH_ELSE:
538 geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
539 replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
540 tf);
541 replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
542 tf);
544 break;
546 default:
547 /* These won't have gotos in them. */
548 break;
551 gsi_next (gsi);
554 /* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */
556 static void
557 replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
559 gimple_stmt_iterator gsi = gsi_start (*seq);
561 while (!gsi_end_p (gsi))
562 replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
565 /* Replace all goto queue members. */
567 static void
568 replace_goto_queue (struct leh_tf_state *tf)
570 if (tf->goto_queue_active == 0)
571 return;
572 replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
573 replace_goto_queue_stmt_list (&eh_seq, tf);
576 /* Add a new record to the goto queue contained in TF. NEW_STMT is the
577 data to be added, IS_LABEL indicates whether NEW_STMT is a label or
578 a gimple return. */
580 static void
581 record_in_goto_queue (struct leh_tf_state *tf,
582 treemple new_stmt,
583 int index,
584 bool is_label,
585 location_t location)
587 size_t active, size;
588 struct goto_queue_node *q;
590 gcc_assert (!tf->goto_queue_map);
592 active = tf->goto_queue_active;
593 size = tf->goto_queue_size;
594 if (active >= size)
596 size = (size ? size * 2 : 32);
597 tf->goto_queue_size = size;
598 tf->goto_queue
599 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
602 q = &tf->goto_queue[active];
603 tf->goto_queue_active = active + 1;
605 memset (q, 0, sizeof (*q));
606 q->stmt = new_stmt;
607 q->index = index;
608 q->location = location;
609 q->is_label = is_label;
612 /* Record the LABEL label in the goto queue contained in TF.
613 TF is not null. */
615 static void
616 record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
617 location_t location)
619 int index;
620 treemple temp, new_stmt;
622 if (!label)
623 return;
625 /* Computed and non-local gotos do not get processed. Given
626 their nature we can neither tell whether we've escaped the
627 finally block nor redirect them if we knew. */
628 if (TREE_CODE (label) != LABEL_DECL)
629 return;
631 /* No need to record gotos that don't leave the try block. */
632 temp.t = label;
633 if (!outside_finally_tree (temp, tf->try_finally_expr))
634 return;
636 if (! tf->dest_array.exists ())
638 tf->dest_array.create (10);
639 tf->dest_array.quick_push (label);
640 index = 0;
642 else
644 int n = tf->dest_array.length ();
645 for (index = 0; index < n; ++index)
646 if (tf->dest_array[index] == label)
647 break;
648 if (index == n)
649 tf->dest_array.safe_push (label);
652 /* In the case of a GOTO we want to record the destination label,
653 since with a GIMPLE_COND we have an easy access to the then/else
654 labels. */
655 new_stmt = stmt;
656 record_in_goto_queue (tf, new_stmt, index, true, location);
659 /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
660 node, and if so record that fact in the goto queue associated with that
661 try_finally node. */
663 static void
664 maybe_record_in_goto_queue (struct leh_state *state, gimple *stmt)
666 struct leh_tf_state *tf = state->tf;
667 treemple new_stmt;
669 if (!tf)
670 return;
672 switch (gimple_code (stmt))
674 case GIMPLE_COND:
676 gcond *cond_stmt = as_a <gcond *> (stmt);
677 new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
678 record_in_goto_queue_label (tf, new_stmt,
679 gimple_cond_true_label (cond_stmt),
680 EXPR_LOCATION (*new_stmt.tp));
681 new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
682 record_in_goto_queue_label (tf, new_stmt,
683 gimple_cond_false_label (cond_stmt),
684 EXPR_LOCATION (*new_stmt.tp));
686 break;
687 case GIMPLE_GOTO:
688 new_stmt.g = stmt;
689 record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
690 gimple_location (stmt));
691 break;
693 case GIMPLE_RETURN:
694 tf->may_return = true;
695 new_stmt.g = stmt;
696 record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
697 break;
699 default:
700 gcc_unreachable ();
705 #if CHECKING_P
706 /* We do not process GIMPLE_SWITCHes for now. As long as the original source
707 was in fact structured, and we've not yet done jump threading, then none
708 of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */
710 static void
711 verify_norecord_switch_expr (struct leh_state *state,
712 gswitch *switch_expr)
714 struct leh_tf_state *tf = state->tf;
715 size_t i, n;
717 if (!tf)
718 return;
720 n = gimple_switch_num_labels (switch_expr);
722 for (i = 0; i < n; ++i)
724 treemple temp;
725 tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
726 temp.t = lab;
727 gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
730 #else
731 #define verify_norecord_switch_expr(state, switch_expr)
732 #endif
734 /* Redirect a RETURN_EXPR pointed to by Q to FINLAB. If MOD is
735 non-null, insert it before the new branch. */
737 static void
738 do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
740 gimple *x;
742 /* In the case of a return, the queue node must be a gimple statement. */
743 gcc_assert (!q->is_label);
745 /* Note that the return value may have already been computed, e.g.,
747 int x;
748 int foo (void)
750 x = 0;
751 try {
752 return x;
753 } finally {
754 x++;
758 should return 0, not 1. We don't have to do anything to make
759 this happens because the return value has been placed in the
760 RESULT_DECL already. */
762 q->cont_stmt = q->stmt.g;
764 if (mod)
765 gimple_seq_add_seq (&q->repl_stmt, mod);
767 x = gimple_build_goto (finlab);
768 gimple_set_location (x, q->location);
769 gimple_seq_add_stmt (&q->repl_stmt, x);
772 /* Similar, but easier, for GIMPLE_GOTO. */
774 static void
775 do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
776 struct leh_tf_state *tf)
778 ggoto *x;
780 gcc_assert (q->is_label);
782 q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);
784 if (mod)
785 gimple_seq_add_seq (&q->repl_stmt, mod);
787 x = gimple_build_goto (finlab);
788 gimple_set_location (x, q->location);
789 gimple_seq_add_stmt (&q->repl_stmt, x);
792 /* Emit a standard landing pad sequence into SEQ for REGION. */
794 static void
795 emit_post_landing_pad (gimple_seq *seq, eh_region region)
797 eh_landing_pad lp = region->landing_pads;
798 glabel *x;
800 if (lp == NULL)
801 lp = gen_eh_landing_pad (region);
803 lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
804 EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;
806 x = gimple_build_label (lp->post_landing_pad);
807 gimple_seq_add_stmt (seq, x);
810 /* Emit a RESX statement into SEQ for REGION. */
812 static void
813 emit_resx (gimple_seq *seq, eh_region region)
815 gresx *x = gimple_build_resx (region->index);
816 gimple_seq_add_stmt (seq, x);
817 if (region->outer)
818 record_stmt_eh_region (region->outer, x);
821 /* Note that the current EH region may contain a throw, or a
822 call to a function which itself may contain a throw. */
824 static void
825 note_eh_region_may_contain_throw (eh_region region)
827 while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
829 if (region->type == ERT_MUST_NOT_THROW)
830 break;
831 region = region->outer;
832 if (region == NULL)
833 break;
837 /* Check if REGION has been marked as containing a throw. If REGION is
838 NULL, this predicate is false. */
840 static inline bool
841 eh_region_may_contain_throw (eh_region r)
843 return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
846 /* We want to transform
847 try { body; } catch { stuff; }
849 normal_sequence:
850 body;
851 over:
852 eh_sequence:
853 landing_pad:
854 stuff;
855 goto over;
857 TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad
858 should be placed before the second operand, or NULL. OVER is
859 an existing label that should be put at the exit, or NULL. */
861 static gimple_seq
862 frob_into_branch_around (gtry *tp, eh_region region, tree over)
864 gimple *x;
865 gimple_seq cleanup, result;
866 location_t loc = gimple_location (tp);
868 cleanup = gimple_try_cleanup (tp);
869 result = gimple_try_eval (tp);
871 if (region)
872 emit_post_landing_pad (&eh_seq, region);
874 if (gimple_seq_may_fallthru (cleanup))
876 if (!over)
877 over = create_artificial_label (loc);
878 x = gimple_build_goto (over);
879 gimple_set_location (x, loc);
880 gimple_seq_add_stmt (&cleanup, x);
882 gimple_seq_add_seq (&eh_seq, cleanup);
884 if (over)
886 x = gimple_build_label (over);
887 gimple_seq_add_stmt (&result, x);
889 return result;
892 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
893 Make sure to record all new labels found. */
895 static gimple_seq
896 lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
897 location_t loc)
899 gtry *region = NULL;
900 gimple_seq new_seq;
901 gimple_stmt_iterator gsi;
903 new_seq = copy_gimple_seq_and_replace_locals (seq);
905 for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
907 gimple *stmt = gsi_stmt (gsi);
908 if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
910 tree block = gimple_block (stmt);
911 gimple_set_location (stmt, loc);
912 gimple_set_block (stmt, block);
916 if (outer_state->tf)
917 region = outer_state->tf->try_finally_expr;
918 collect_finally_tree_1 (new_seq, region);
920 return new_seq;
923 /* A subroutine of lower_try_finally. Create a fallthru label for
924 the given try_finally state. The only tricky bit here is that
925 we have to make sure to record the label in our outer context. */
927 static tree
928 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
930 tree label = tf->fallthru_label;
931 treemple temp;
933 if (!label)
935 label = create_artificial_label (gimple_location (tf->try_finally_expr));
936 tf->fallthru_label = label;
937 if (tf->outer->tf)
939 temp.t = label;
940 record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
943 return label;
946 /* A subroutine of lower_try_finally. If FINALLY consits of a
947 GIMPLE_EH_ELSE node, return it. */
949 static inline geh_else *
950 get_eh_else (gimple_seq finally)
952 gimple *x = gimple_seq_first_stmt (finally);
953 if (gimple_code (x) == GIMPLE_EH_ELSE)
955 gcc_assert (gimple_seq_singleton_p (finally));
956 return as_a <geh_else *> (x);
958 return NULL;
961 /* A subroutine of lower_try_finally. If the eh_protect_cleanup_actions
962 langhook returns non-null, then the language requires that the exception
963 path out of a try_finally be treated specially. To wit: the code within
964 the finally block may not itself throw an exception. We have two choices
965 here. First we can duplicate the finally block and wrap it in a
966 must_not_throw region. Second, we can generate code like
968 try {
969 finally_block;
970 } catch {
971 if (fintmp == eh_edge)
972 protect_cleanup_actions;
975 where "fintmp" is the temporary used in the switch statement generation
976 alternative considered below. For the nonce, we always choose the first
977 option.
979 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
981 static void
982 honor_protect_cleanup_actions (struct leh_state *outer_state,
983 struct leh_state *this_state,
984 struct leh_tf_state *tf)
986 gimple_seq finally = gimple_try_cleanup (tf->top_p);
988 /* EH_ELSE doesn't come from user code; only compiler generated stuff.
989 It does need to be handled here, so as to separate the (different)
990 EH path from the normal path. But we should not attempt to wrap
991 it with a must-not-throw node (which indeed gets in the way). */
992 if (geh_else *eh_else = get_eh_else (finally))
994 gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
995 finally = gimple_eh_else_e_body (eh_else);
997 /* Let the ELSE see the exception that's being processed, but
998 since the cleanup is outside the try block, process it with
999 outer_state, otherwise it may be used as a cleanup for
1000 itself, and Bad Things (TM) ensue. */
1001 eh_region save_ehp = outer_state->ehp_region;
1002 outer_state->ehp_region = this_state->cur_region;
1003 lower_eh_constructs_1 (outer_state, &finally);
1004 outer_state->ehp_region = save_ehp;
1006 else
1008 /* First check for nothing to do. */
1009 if (lang_hooks.eh_protect_cleanup_actions == NULL)
1010 return;
1011 tree actions = lang_hooks.eh_protect_cleanup_actions ();
1012 if (actions == NULL)
1013 return;
1015 if (this_state)
1016 finally = lower_try_finally_dup_block (finally, outer_state,
1017 gimple_location (tf->try_finally_expr));
1019 /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
1020 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
1021 to be in an enclosing scope, but needs to be implemented at this level
1022 to avoid a nesting violation (see wrap_temporary_cleanups in
1023 cp/decl.c). Since it's logically at an outer level, we should call
1024 terminate before we get to it, so strip it away before adding the
1025 MUST_NOT_THROW filter. */
1026 gimple_stmt_iterator gsi = gsi_start (finally);
1027 gimple *x = gsi_stmt (gsi);
1028 if (gimple_code (x) == GIMPLE_TRY
1029 && gimple_try_kind (x) == GIMPLE_TRY_CATCH
1030 && gimple_try_catch_is_cleanup (x))
1032 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
1033 gsi_remove (&gsi, false);
1036 /* Wrap the block with protect_cleanup_actions as the action. */
1037 geh_mnt *eh_mnt = gimple_build_eh_must_not_throw (actions);
1038 gtry *try_stmt = gimple_build_try (finally,
1039 gimple_seq_alloc_with_stmt (eh_mnt),
1040 GIMPLE_TRY_CATCH);
1041 finally = lower_eh_must_not_throw (outer_state, try_stmt);
1044 /* Drop all of this into the exception sequence. */
1045 emit_post_landing_pad (&eh_seq, tf->region);
1046 gimple_seq_add_seq (&eh_seq, finally);
1047 if (gimple_seq_may_fallthru (finally))
1048 emit_resx (&eh_seq, tf->region);
1050 /* Having now been handled, EH isn't to be considered with
1051 the rest of the outgoing edges. */
1052 tf->may_throw = false;
1055 /* A subroutine of lower_try_finally. We have determined that there is
1056 no fallthru edge out of the finally block. This means that there is
1057 no outgoing edge corresponding to any incoming edge. Restructure the
1058 try_finally node for this special case. */
1060 static void
1061 lower_try_finally_nofallthru (struct leh_state *state,
1062 struct leh_tf_state *tf)
1064 tree lab;
1065 gimple *x;
1066 geh_else *eh_else;
1067 gimple_seq finally;
1068 struct goto_queue_node *q, *qe;
1070 lab = create_artificial_label (gimple_location (tf->try_finally_expr));
1072 /* We expect that tf->top_p is a GIMPLE_TRY. */
1073 finally = gimple_try_cleanup (tf->top_p);
1074 tf->top_p_seq = gimple_try_eval (tf->top_p);
1076 x = gimple_build_label (lab);
1077 gimple_seq_add_stmt (&tf->top_p_seq, x);
1079 q = tf->goto_queue;
1080 qe = q + tf->goto_queue_active;
1081 for (; q < qe; ++q)
1082 if (q->index < 0)
1083 do_return_redirection (q, lab, NULL);
1084 else
1085 do_goto_redirection (q, lab, NULL, tf);
1087 replace_goto_queue (tf);
1089 /* Emit the finally block into the stream. Lower EH_ELSE at this time. */
1090 eh_else = get_eh_else (finally);
1091 if (eh_else)
1093 finally = gimple_eh_else_n_body (eh_else);
1094 lower_eh_constructs_1 (state, &finally);
1095 gimple_seq_add_seq (&tf->top_p_seq, finally);
1097 if (tf->may_throw)
1099 finally = gimple_eh_else_e_body (eh_else);
1100 lower_eh_constructs_1 (state, &finally);
1102 emit_post_landing_pad (&eh_seq, tf->region);
1103 gimple_seq_add_seq (&eh_seq, finally);
1106 else
1108 lower_eh_constructs_1 (state, &finally);
1109 gimple_seq_add_seq (&tf->top_p_seq, finally);
1111 if (tf->may_throw)
1113 emit_post_landing_pad (&eh_seq, tf->region);
1115 x = gimple_build_goto (lab);
1116 gimple_set_location (x, gimple_location (tf->try_finally_expr));
1117 gimple_seq_add_stmt (&eh_seq, x);
1122 /* A subroutine of lower_try_finally. We have determined that there is
1123 exactly one destination of the finally block. Restructure the
1124 try_finally node for this special case. */
1126 static void
1127 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
1129 struct goto_queue_node *q, *qe;
1130 geh_else *eh_else;
1131 glabel *label_stmt;
1132 gimple *x;
1133 gimple_seq finally;
1134 gimple_stmt_iterator gsi;
1135 tree finally_label;
1136 location_t loc = gimple_location (tf->try_finally_expr);
1138 finally = gimple_try_cleanup (tf->top_p);
1139 tf->top_p_seq = gimple_try_eval (tf->top_p);
1141 /* Since there's only one destination, and the destination edge can only
1142 either be EH or non-EH, that implies that all of our incoming edges
1143 are of the same type. Therefore we can lower EH_ELSE immediately. */
1144 eh_else = get_eh_else (finally);
1145 if (eh_else)
1147 if (tf->may_throw)
1148 finally = gimple_eh_else_e_body (eh_else);
1149 else
1150 finally = gimple_eh_else_n_body (eh_else);
1153 lower_eh_constructs_1 (state, &finally);
1155 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1157 gimple *stmt = gsi_stmt (gsi);
1158 if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
1160 tree block = gimple_block (stmt);
1161 gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
1162 gimple_set_block (stmt, block);
1166 if (tf->may_throw)
1168 /* Only reachable via the exception edge. Add the given label to
1169 the head of the FINALLY block. Append a RESX at the end. */
1170 emit_post_landing_pad (&eh_seq, tf->region);
1171 gimple_seq_add_seq (&eh_seq, finally);
1172 emit_resx (&eh_seq, tf->region);
1173 return;
1176 if (tf->may_fallthru)
1178 /* Only reachable via the fallthru edge. Do nothing but let
1179 the two blocks run together; we'll fall out the bottom. */
1180 gimple_seq_add_seq (&tf->top_p_seq, finally);
1181 return;
1184 finally_label = create_artificial_label (loc);
1185 label_stmt = gimple_build_label (finally_label);
1186 gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);
1188 gimple_seq_add_seq (&tf->top_p_seq, finally);
1190 q = tf->goto_queue;
1191 qe = q + tf->goto_queue_active;
1193 if (tf->may_return)
1195 /* Reachable by return expressions only. Redirect them. */
1196 for (; q < qe; ++q)
1197 do_return_redirection (q, finally_label, NULL);
1198 replace_goto_queue (tf);
1200 else
1202 /* Reachable by goto expressions only. Redirect them. */
1203 for (; q < qe; ++q)
1204 do_goto_redirection (q, finally_label, NULL, tf);
1205 replace_goto_queue (tf);
1207 if (tf->dest_array[0] == tf->fallthru_label)
1209 /* Reachable by goto to fallthru label only. Redirect it
1210 to the new label (already created, sadly), and do not
1211 emit the final branch out, or the fallthru label. */
1212 tf->fallthru_label = NULL;
1213 return;
1217 /* Place the original return/goto to the original destination
1218 immediately after the finally block. */
1219 x = tf->goto_queue[0].cont_stmt;
1220 gimple_seq_add_stmt (&tf->top_p_seq, x);
1221 maybe_record_in_goto_queue (state, x);
1224 /* A subroutine of lower_try_finally. There are multiple edges incoming
1225 and outgoing from the finally block. Implement this by duplicating the
1226 finally block for every destination. */
1228 static void
1229 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1231 gimple_seq finally;
1232 gimple_seq new_stmt;
1233 gimple_seq seq;
1234 gimple *x;
1235 geh_else *eh_else;
1236 tree tmp;
1237 location_t tf_loc = gimple_location (tf->try_finally_expr);
1239 finally = gimple_try_cleanup (tf->top_p);
1241 /* Notice EH_ELSE, and simplify some of the remaining code
1242 by considering FINALLY to be the normal return path only. */
1243 eh_else = get_eh_else (finally);
1244 if (eh_else)
1245 finally = gimple_eh_else_n_body (eh_else);
1247 tf->top_p_seq = gimple_try_eval (tf->top_p);
1248 new_stmt = NULL;
1250 if (tf->may_fallthru)
1252 seq = lower_try_finally_dup_block (finally, state, tf_loc);
1253 lower_eh_constructs_1 (state, &seq);
1254 gimple_seq_add_seq (&new_stmt, seq);
1256 tmp = lower_try_finally_fallthru_label (tf);
1257 x = gimple_build_goto (tmp);
1258 gimple_set_location (x, tf_loc);
1259 gimple_seq_add_stmt (&new_stmt, x);
1262 if (tf->may_throw)
1264 /* We don't need to copy the EH path of EH_ELSE,
1265 since it is only emitted once. */
1266 if (eh_else)
1267 seq = gimple_eh_else_e_body (eh_else);
1268 else
1269 seq = lower_try_finally_dup_block (finally, state, tf_loc);
1270 lower_eh_constructs_1 (state, &seq);
1272 emit_post_landing_pad (&eh_seq, tf->region);
1273 gimple_seq_add_seq (&eh_seq, seq);
1274 emit_resx (&eh_seq, tf->region);
1277 if (tf->goto_queue)
1279 struct goto_queue_node *q, *qe;
1280 int return_index, index;
1281 struct labels_s
1283 struct goto_queue_node *q;
1284 tree label;
1285 } *labels;
1287 return_index = tf->dest_array.length ();
1288 labels = XCNEWVEC (struct labels_s, return_index + 1);
1290 q = tf->goto_queue;
1291 qe = q + tf->goto_queue_active;
1292 for (; q < qe; q++)
1294 index = q->index < 0 ? return_index : q->index;
1296 if (!labels[index].q)
1297 labels[index].q = q;
1300 for (index = 0; index < return_index + 1; index++)
1302 tree lab;
1304 q = labels[index].q;
1305 if (! q)
1306 continue;
1308 lab = labels[index].label
1309 = create_artificial_label (tf_loc);
1311 if (index == return_index)
1312 do_return_redirection (q, lab, NULL);
1313 else
1314 do_goto_redirection (q, lab, NULL, tf);
1316 x = gimple_build_label (lab);
1317 gimple_seq_add_stmt (&new_stmt, x);
1319 seq = lower_try_finally_dup_block (finally, state, q->location);
1320 lower_eh_constructs_1 (state, &seq);
1321 gimple_seq_add_seq (&new_stmt, seq);
1323 gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
1324 maybe_record_in_goto_queue (state, q->cont_stmt);
1327 for (q = tf->goto_queue; q < qe; q++)
1329 tree lab;
1331 index = q->index < 0 ? return_index : q->index;
1333 if (labels[index].q == q)
1334 continue;
1336 lab = labels[index].label;
1338 if (index == return_index)
1339 do_return_redirection (q, lab, NULL);
1340 else
1341 do_goto_redirection (q, lab, NULL, tf);
1344 replace_goto_queue (tf);
1345 free (labels);
1348 /* Need to link new stmts after running replace_goto_queue due
1349 to not wanting to process the same goto stmts twice. */
1350 gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
1353 /* A subroutine of lower_try_finally. There are multiple edges incoming
1354 and outgoing from the finally block. Implement this by instrumenting
1355 each incoming edge and creating a switch statement at the end of the
1356 finally block that branches to the appropriate destination. */
1358 static void
1359 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1361 struct goto_queue_node *q, *qe;
1362 tree finally_tmp, finally_label;
1363 int return_index, eh_index, fallthru_index;
1364 int nlabels, ndests, j, last_case_index;
1365 tree last_case;
1366 auto_vec<tree> case_label_vec;
1367 gimple_seq switch_body = NULL;
1368 gimple *x;
1369 geh_else *eh_else;
1370 tree tmp;
1371 gimple *switch_stmt;
1372 gimple_seq finally;
1373 hash_map<tree, gimple *> *cont_map = NULL;
1374 /* The location of the TRY_FINALLY stmt. */
1375 location_t tf_loc = gimple_location (tf->try_finally_expr);
1376 /* The location of the finally block. */
1377 location_t finally_loc;
1379 finally = gimple_try_cleanup (tf->top_p);
1380 eh_else = get_eh_else (finally);
1382 /* Mash the TRY block to the head of the chain. */
1383 tf->top_p_seq = gimple_try_eval (tf->top_p);
1385 /* The location of the finally is either the last stmt in the finally
1386 block or the location of the TRY_FINALLY itself. */
1387 x = gimple_seq_last_stmt (finally);
1388 finally_loc = x ? gimple_location (x) : tf_loc;
1390 /* Prepare for switch statement generation. */
1391 nlabels = tf->dest_array.length ();
1392 return_index = nlabels;
1393 eh_index = return_index + tf->may_return;
1394 fallthru_index = eh_index + (tf->may_throw && !eh_else);
1395 ndests = fallthru_index + tf->may_fallthru;
1397 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1398 finally_label = create_artificial_label (finally_loc);
1400 /* We use vec::quick_push on case_label_vec throughout this function,
1401 since we know the size in advance and allocate precisely as muce
1402 space as needed. */
1403 case_label_vec.create (ndests);
1404 last_case = NULL;
1405 last_case_index = 0;
1407 /* Begin inserting code for getting to the finally block. Things
1408 are done in this order to correspond to the sequence the code is
1409 laid out. */
1411 if (tf->may_fallthru)
1413 x = gimple_build_assign (finally_tmp,
1414 build_int_cst (integer_type_node,
1415 fallthru_index));
1416 gimple_set_location (x, finally_loc);
1417 gimple_seq_add_stmt (&tf->top_p_seq, x);
1419 tmp = build_int_cst (integer_type_node, fallthru_index);
1420 last_case = build_case_label (tmp, NULL,
1421 create_artificial_label (finally_loc));
1422 case_label_vec.quick_push (last_case);
1423 last_case_index++;
1425 x = gimple_build_label (CASE_LABEL (last_case));
1426 gimple_seq_add_stmt (&switch_body, x);
1428 tmp = lower_try_finally_fallthru_label (tf);
1429 x = gimple_build_goto (tmp);
1430 gimple_set_location (x, finally_loc);
1431 gimple_seq_add_stmt (&switch_body, x);
1434 /* For EH_ELSE, emit the exception path (plus resx) now, then
1435 subsequently we only need consider the normal path. */
1436 if (eh_else)
1438 if (tf->may_throw)
1440 finally = gimple_eh_else_e_body (eh_else);
1441 lower_eh_constructs_1 (state, &finally);
1443 emit_post_landing_pad (&eh_seq, tf->region);
1444 gimple_seq_add_seq (&eh_seq, finally);
1445 emit_resx (&eh_seq, tf->region);
1448 finally = gimple_eh_else_n_body (eh_else);
1450 else if (tf->may_throw)
1452 emit_post_landing_pad (&eh_seq, tf->region);
1454 x = gimple_build_assign (finally_tmp,
1455 build_int_cst (integer_type_node, eh_index));
1456 gimple_seq_add_stmt (&eh_seq, x);
1458 x = gimple_build_goto (finally_label);
1459 gimple_set_location (x, tf_loc);
1460 gimple_seq_add_stmt (&eh_seq, x);
1462 tmp = build_int_cst (integer_type_node, eh_index);
1463 last_case = build_case_label (tmp, NULL,
1464 create_artificial_label (tf_loc));
1465 case_label_vec.quick_push (last_case);
1466 last_case_index++;
1468 x = gimple_build_label (CASE_LABEL (last_case));
1469 gimple_seq_add_stmt (&eh_seq, x);
1470 emit_resx (&eh_seq, tf->region);
1473 x = gimple_build_label (finally_label);
1474 gimple_seq_add_stmt (&tf->top_p_seq, x);
1476 lower_eh_constructs_1 (state, &finally);
1477 gimple_seq_add_seq (&tf->top_p_seq, finally);
1479 /* Redirect each incoming goto edge. */
1480 q = tf->goto_queue;
1481 qe = q + tf->goto_queue_active;
1482 j = last_case_index + tf->may_return;
1483 /* Prepare the assignments to finally_tmp that are executed upon the
1484 entrance through a particular edge. */
1485 for (; q < qe; ++q)
1487 gimple_seq mod = NULL;
1488 int switch_id;
1489 unsigned int case_index;
1491 if (q->index < 0)
1493 x = gimple_build_assign (finally_tmp,
1494 build_int_cst (integer_type_node,
1495 return_index));
1496 gimple_seq_add_stmt (&mod, x);
1497 do_return_redirection (q, finally_label, mod);
1498 switch_id = return_index;
1500 else
1502 x = gimple_build_assign (finally_tmp,
1503 build_int_cst (integer_type_node, q->index));
1504 gimple_seq_add_stmt (&mod, x);
1505 do_goto_redirection (q, finally_label, mod, tf);
1506 switch_id = q->index;
1509 case_index = j + q->index;
1510 if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
1512 tree case_lab;
1513 tmp = build_int_cst (integer_type_node, switch_id);
1514 case_lab = build_case_label (tmp, NULL,
1515 create_artificial_label (tf_loc));
1516 /* We store the cont_stmt in the pointer map, so that we can recover
1517 it in the loop below. */
1518 if (!cont_map)
1519 cont_map = new hash_map<tree, gimple *>;
1520 cont_map->put (case_lab, q->cont_stmt);
1521 case_label_vec.quick_push (case_lab);
1524 for (j = last_case_index; j < last_case_index + nlabels; j++)
1526 gimple *cont_stmt;
1528 last_case = case_label_vec[j];
1530 gcc_assert (last_case);
1531 gcc_assert (cont_map);
1533 cont_stmt = *cont_map->get (last_case);
1535 x = gimple_build_label (CASE_LABEL (last_case));
1536 gimple_seq_add_stmt (&switch_body, x);
1537 gimple_seq_add_stmt (&switch_body, cont_stmt);
1538 maybe_record_in_goto_queue (state, cont_stmt);
1540 if (cont_map)
1541 delete cont_map;
1543 replace_goto_queue (tf);
1545 /* Make sure that the last case is the default label, as one is required.
1546 Then sort the labels, which is also required in GIMPLE. */
1547 CASE_LOW (last_case) = NULL;
1548 tree tem = case_label_vec.pop ();
1549 gcc_assert (tem == last_case);
1550 sort_case_labels (case_label_vec);
1552 /* Build the switch statement, setting last_case to be the default
1553 label. */
1554 switch_stmt = gimple_build_switch (finally_tmp, last_case,
1555 case_label_vec);
1556 gimple_set_location (switch_stmt, finally_loc);
1558 /* Need to link SWITCH_STMT after running replace_goto_queue
1559 due to not wanting to process the same goto stmts twice. */
1560 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
1561 gimple_seq_add_seq (&tf->top_p_seq, switch_body);
1564 /* Decide whether or not we are going to duplicate the finally block.
1565 There are several considerations.
1567 Second, we'd like to prevent egregious code growth. One way to
1568 do this is to estimate the size of the finally block, multiply
1569 that by the number of copies we'd need to make, and compare against
1570 the estimate of the size of the switch machinery we'd have to add. */
1572 static bool
1573 decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
1575 int f_estimate, sw_estimate;
1576 geh_else *eh_else;
1578 /* If there's an EH_ELSE involved, the exception path is separate
1579 and really doesn't come into play for this computation. */
1580 eh_else = get_eh_else (finally);
1581 if (eh_else)
1583 ndests -= may_throw;
1584 finally = gimple_eh_else_n_body (eh_else);
1587 if (!optimize)
1589 gimple_stmt_iterator gsi;
1591 if (ndests == 1)
1592 return true;
1594 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1596 /* Duplicate __builtin_stack_restore in the hope of eliminating it
1597 on the EH paths and, consequently, useless cleanups. */
1598 gimple *stmt = gsi_stmt (gsi);
1599 if (!is_gimple_debug (stmt)
1600 && !gimple_clobber_p (stmt)
1601 && !gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
1602 return false;
1604 return true;
1607 /* Finally estimate N times, plus N gotos. */
1608 f_estimate = estimate_num_insns_seq (finally, &eni_size_weights);
1609 f_estimate = (f_estimate + 1) * ndests;
1611 /* Switch statement (cost 10), N variable assignments, N gotos. */
1612 sw_estimate = 10 + 2 * ndests;
1614 /* Optimize for size clearly wants our best guess. */
1615 if (optimize_function_for_size_p (cfun))
1616 return f_estimate < sw_estimate;
1618 /* ??? These numbers are completely made up so far. */
1619 if (optimize > 1)
1620 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1621 else
1622 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1625 /* REG is current region of a LEH state.
1626 is the enclosing region for a possible cleanup region, or the region
1627 itself. Returns TRUE if such a region would be unreachable.
1629 Cleanup regions within a must-not-throw region aren't actually reachable
1630 even if there are throwing stmts within them, because the personality
1631 routine will call terminate before unwinding. */
1633 static bool
1634 cleanup_is_dead_in (leh_state *state)
1636 if (flag_checking)
1638 eh_region reg = state->cur_region;
1639 while (reg && reg->type == ERT_CLEANUP)
1640 reg = reg->outer;
1642 gcc_assert (reg == state->outer_non_cleanup);
1645 eh_region reg = state->outer_non_cleanup;
1646 return (reg && reg->type == ERT_MUST_NOT_THROW);
1649 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY nodes
1650 to a sequence of labels and blocks, plus the exception region trees
1651 that record all the magic. This is complicated by the need to
1652 arrange for the FINALLY block to be executed on all exits. */
1654 static gimple_seq
1655 lower_try_finally (struct leh_state *state, gtry *tp)
1657 struct leh_tf_state this_tf;
1658 struct leh_state this_state;
1659 int ndests;
1660 gimple_seq old_eh_seq;
1662 /* Process the try block. */
1664 memset (&this_tf, 0, sizeof (this_tf));
1665 this_tf.try_finally_expr = tp;
1666 this_tf.top_p = tp;
1667 this_tf.outer = state;
1668 if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state))
1670 this_tf.region = gen_eh_region_cleanup (state->cur_region);
1671 this_state.cur_region = this_tf.region;
1673 else
1675 this_tf.region = NULL;
1676 this_state.cur_region = state->cur_region;
1679 this_state.outer_non_cleanup = state->outer_non_cleanup;
1680 this_state.ehp_region = state->ehp_region;
1681 this_state.tf = &this_tf;
1683 old_eh_seq = eh_seq;
1684 eh_seq = NULL;
1686 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1688 /* Determine if the try block is escaped through the bottom. */
1689 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1691 /* Determine if any exceptions are possible within the try block. */
1692 if (this_tf.region)
1693 this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
1694 if (this_tf.may_throw)
1695 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1697 /* Determine how many edges (still) reach the finally block. Or rather,
1698 how many destinations are reached by the finally block. Use this to
1699 determine how we process the finally block itself. */
1701 ndests = this_tf.dest_array.length ();
1702 ndests += this_tf.may_fallthru;
1703 ndests += this_tf.may_return;
1704 ndests += this_tf.may_throw;
1706 /* If the FINALLY block is not reachable, dike it out. */
1707 if (ndests == 0)
1709 gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
1710 gimple_try_set_cleanup (tp, NULL);
1712 /* If the finally block doesn't fall through, then any destination
1713 we might try to impose there isn't reached either. There may be
1714 some minor amount of cleanup and redirection still needed. */
1715 else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
1716 lower_try_finally_nofallthru (state, &this_tf);
1718 /* We can easily special-case redirection to a single destination. */
1719 else if (ndests == 1)
1720 lower_try_finally_onedest (state, &this_tf);
1721 else if (decide_copy_try_finally (ndests, this_tf.may_throw,
1722 gimple_try_cleanup (tp)))
1723 lower_try_finally_copy (state, &this_tf);
1724 else
1725 lower_try_finally_switch (state, &this_tf);
1727 /* If someone requested we add a label at the end of the transformed
1728 block, do so. */
1729 if (this_tf.fallthru_label)
1731 /* This must be reached only if ndests == 0. */
1732 gimple *x = gimple_build_label (this_tf.fallthru_label);
1733 gimple_seq_add_stmt (&this_tf.top_p_seq, x);
1736 this_tf.dest_array.release ();
1737 free (this_tf.goto_queue);
1738 if (this_tf.goto_queue_map)
1739 delete this_tf.goto_queue_map;
1741 /* If there was an old (aka outer) eh_seq, append the current eh_seq.
1742 If there was no old eh_seq, then the append is trivially already done. */
1743 if (old_eh_seq)
1745 if (eh_seq == NULL)
1746 eh_seq = old_eh_seq;
1747 else
1749 gimple_seq new_eh_seq = eh_seq;
1750 eh_seq = old_eh_seq;
1751 gimple_seq_add_seq (&eh_seq, new_eh_seq);
1755 return this_tf.top_p_seq;
1758 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a
1759 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
1760 exception region trees that records all the magic. */
1762 static gimple_seq
1763 lower_catch (struct leh_state *state, gtry *tp)
1765 eh_region try_region = NULL;
1766 struct leh_state this_state = *state;
1767 gimple_stmt_iterator gsi;
1768 tree out_label;
1769 gimple_seq new_seq, cleanup;
1770 gimple *x;
1771 geh_dispatch *eh_dispatch;
1772 location_t try_catch_loc = gimple_location (tp);
1773 location_t catch_loc = UNKNOWN_LOCATION;
1775 if (flag_exceptions)
1777 try_region = gen_eh_region_try (state->cur_region);
1778 this_state.cur_region = try_region;
1779 this_state.outer_non_cleanup = this_state.cur_region;
1782 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1784 if (!eh_region_may_contain_throw (try_region))
1785 return gimple_try_eval (tp);
1787 new_seq = NULL;
1788 eh_dispatch = gimple_build_eh_dispatch (try_region->index);
1789 gimple_seq_add_stmt (&new_seq, eh_dispatch);
1790 emit_resx (&new_seq, try_region);
1792 this_state.cur_region = state->cur_region;
1793 this_state.outer_non_cleanup = state->outer_non_cleanup;
1794 this_state.ehp_region = try_region;
1796 /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
1797 itself, so that e.g. for coverage purposes the nested cleanups don't
1798 appear before the cleanup body. See PR64634 for details. */
1799 gimple_seq old_eh_seq = eh_seq;
1800 eh_seq = NULL;
1802 out_label = NULL;
1803 cleanup = gimple_try_cleanup (tp);
1804 for (gsi = gsi_start (cleanup);
1805 !gsi_end_p (gsi);
1806 gsi_next (&gsi))
1808 eh_catch c;
1809 gcatch *catch_stmt;
1810 gimple_seq handler;
1812 catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
1813 if (catch_loc == UNKNOWN_LOCATION)
1814 catch_loc = gimple_location (catch_stmt);
1815 c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));
1817 handler = gimple_catch_handler (catch_stmt);
1818 lower_eh_constructs_1 (&this_state, &handler);
1820 c->label = create_artificial_label (UNKNOWN_LOCATION);
1821 x = gimple_build_label (c->label);
1822 gimple_seq_add_stmt (&new_seq, x);
1824 gimple_seq_add_seq (&new_seq, handler);
1826 if (gimple_seq_may_fallthru (new_seq))
1828 if (!out_label)
1829 out_label = create_artificial_label (try_catch_loc);
1831 x = gimple_build_goto (out_label);
1832 gimple_seq_add_stmt (&new_seq, x);
1834 if (!c->type_list)
1835 break;
1838 /* Try to set a location on the dispatching construct to avoid inheriting
1839 the location of the previous statement. */
1840 gimple_set_location (eh_dispatch, catch_loc);
1842 gimple_try_set_cleanup (tp, new_seq);
1844 gimple_seq new_eh_seq = eh_seq;
1845 eh_seq = old_eh_seq;
1846 gimple_seq ret_seq = frob_into_branch_around (tp, try_region, out_label);
1847 gimple_seq_add_seq (&eh_seq, new_eh_seq);
1848 return ret_seq;
1851 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a
1852 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
1853 region trees that record all the magic. */
1855 static gimple_seq
1856 lower_eh_filter (struct leh_state *state, gtry *tp)
1858 struct leh_state this_state = *state;
1859 eh_region this_region = NULL;
1860 gimple *inner, *x;
1861 gimple_seq new_seq;
1863 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1865 if (flag_exceptions)
1867 this_region = gen_eh_region_allowed (state->cur_region,
1868 gimple_eh_filter_types (inner));
1869 this_state.cur_region = this_region;
1870 this_state.outer_non_cleanup = this_state.cur_region;
1873 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1875 if (!eh_region_may_contain_throw (this_region))
1876 return gimple_try_eval (tp);
1878 this_state.cur_region = state->cur_region;
1879 this_state.ehp_region = this_region;
1881 new_seq = NULL;
1882 x = gimple_build_eh_dispatch (this_region->index);
1883 gimple_set_location (x, gimple_location (tp));
1884 gimple_seq_add_stmt (&new_seq, x);
1885 emit_resx (&new_seq, this_region);
1887 this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
1888 x = gimple_build_label (this_region->u.allowed.label);
1889 gimple_seq_add_stmt (&new_seq, x);
1891 lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
1892 gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));
1894 gimple_try_set_cleanup (tp, new_seq);
1896 return frob_into_branch_around (tp, this_region, NULL);
1899 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with
1900 an GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
1901 plus the exception region trees that record all the magic. */
1903 static gimple_seq
1904 lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
1906 struct leh_state this_state = *state;
1908 if (flag_exceptions)
1910 gimple *inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1911 eh_region this_region;
1913 this_region = gen_eh_region_must_not_throw (state->cur_region);
1914 this_region->u.must_not_throw.failure_decl
1915 = gimple_eh_must_not_throw_fndecl (
1916 as_a <geh_mnt *> (inner));
1917 this_region->u.must_not_throw.failure_loc
1918 = LOCATION_LOCUS (gimple_location (tp));
1920 /* In order to get mangling applied to this decl, we must mark it
1921 used now. Otherwise, pass_ipa_free_lang_data won't think it
1922 needs to happen. */
1923 TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
1925 this_state.cur_region = this_region;
1926 this_state.outer_non_cleanup = this_state.cur_region;
1929 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1931 return gimple_try_eval (tp);
1934 /* Implement a cleanup expression. This is similar to try-finally,
1935 except that we only execute the cleanup block for exception edges. */
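/* A hypothetical example: for

     try { body (); } cleanup { unlock (&m); }

   the normal path falls through without executing unlock; only when
   body () throws do we enter the ERT_CLEANUP region, run unlock, and
   resume unwinding to the outer region. With try-finally, unlock
   would run on the fallthru path as well. */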
1937 static gimple_seq
1938 lower_cleanup (struct leh_state *state, gtry *tp)
1940 struct leh_state this_state = *state;
1941 eh_region this_region = NULL;
1942 struct leh_tf_state fake_tf;
1943 gimple_seq result;
1944 bool cleanup_dead = cleanup_is_dead_in (state);
1946 if (flag_exceptions && !cleanup_dead)
1948 this_region = gen_eh_region_cleanup (state->cur_region);
1949 this_state.cur_region = this_region;
1950 this_state.outer_non_cleanup = state->outer_non_cleanup;
1953 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1955 if (cleanup_dead || !eh_region_may_contain_throw (this_region))
1956 return gimple_try_eval (tp);
1958 /* Build enough of a try-finally state so that we can reuse
1959 honor_protect_cleanup_actions. */
1960 memset (&fake_tf, 0, sizeof (fake_tf));
1961 fake_tf.top_p = fake_tf.try_finally_expr = tp;
1962 fake_tf.outer = state;
1963 fake_tf.region = this_region;
1964 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1965 fake_tf.may_throw = true;
1967 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1969 if (fake_tf.may_throw)
1971 /* In this case honor_protect_cleanup_actions had nothing to do,
1972 and we should process this normally. */
1973 lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
1974 result = frob_into_branch_around (tp, this_region,
1975 fake_tf.fallthru_label);
1977 else
1979 /* In this case honor_protect_cleanup_actions did nearly all of
1980 the work. All we have left is to append the fallthru_label. */
1982 result = gimple_try_eval (tp);
1983 if (fake_tf.fallthru_label)
1985 gimple *x = gimple_build_label (fake_tf.fallthru_label);
1986 gimple_seq_add_stmt (&result, x);
1989 return result;
1992 /* Main loop for lowering eh constructs. Also moves gsi to the next
1993 statement. */
1995 static void
1996 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1998 gimple_seq replace;
1999 gimple *x;
2000 gimple *stmt = gsi_stmt (*gsi);
2002 switch (gimple_code (stmt))
2004 case GIMPLE_CALL:
2006 tree fndecl = gimple_call_fndecl (stmt);
2007 tree rhs, lhs;
2009 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
2010 switch (DECL_FUNCTION_CODE (fndecl))
2012 case BUILT_IN_EH_POINTER:
2013 /* The front end may have generated a call to
2014 __builtin_eh_pointer (0) within a catch region. Replace
2015 this zero argument with the current catch region number. */
2016 if (state->ehp_region)
2018 tree nr = build_int_cst (integer_type_node,
2019 state->ehp_region->index);
2020 gimple_call_set_arg (stmt, 0, nr);
2022 else
2024 /* The user has done something silly. Remove it. */
2025 rhs = null_pointer_node;
2026 goto do_replace;
2028 break;
2030 case BUILT_IN_EH_FILTER:
2031 /* ??? This should never appear, but since it's a builtin it
2032 is accessible to abuse by users. Just remove it and
2033 replace the use with the arbitrary value zero. */
2034 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
2035 do_replace:
2036 lhs = gimple_call_lhs (stmt);
2037 x = gimple_build_assign (lhs, rhs);
2038 gsi_insert_before (gsi, x, GSI_SAME_STMT);
2039 /* FALLTHRU */
2041 case BUILT_IN_EH_COPY_VALUES:
2042 /* Likewise this should not appear. Remove it. */
2043 gsi_remove (gsi, true);
2044 return;
2046 default:
2047 break;
2050 /* FALLTHRU */
2052 case GIMPLE_ASSIGN:
2053 /* If the stmt can throw, use a new temporary for the assignment
2054 to a LHS. This makes sure the old value of the LHS is
2055 available on the EH edge. Only do so for statements that
2056 potentially fall through (no noreturn calls e.g.), otherwise
2057 this new assignment might create fake fallthru regions. */
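/* E.g. (an illustrative sketch), a potentially trapping division

     x = a / b;

   is rewritten below as

     tmp = a / b;
     x = tmp;

   so the EH edge out of the division still sees the old value of x;
   only the fallthru path commits the new one. */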
2058 if (stmt_could_throw_p (cfun, stmt)
2059 && gimple_has_lhs (stmt)
2060 && gimple_stmt_may_fallthru (stmt)
2061 && !tree_could_throw_p (gimple_get_lhs (stmt))
2062 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
2064 tree lhs = gimple_get_lhs (stmt);
2065 tree tmp = create_tmp_var (TREE_TYPE (lhs));
2066 gimple *s = gimple_build_assign (lhs, tmp);
2067 gimple_set_location (s, gimple_location (stmt));
2068 gimple_set_block (s, gimple_block (stmt));
2069 gimple_set_lhs (stmt, tmp);
2070 gsi_insert_after (gsi, s, GSI_SAME_STMT);
2072 /* Look for things that can throw exceptions, and record them. */
2073 if (state->cur_region && stmt_could_throw_p (cfun, stmt))
2075 record_stmt_eh_region (state->cur_region, stmt);
2076 note_eh_region_may_contain_throw (state->cur_region);
2078 break;
2080 case GIMPLE_COND:
2081 case GIMPLE_GOTO:
2082 case GIMPLE_RETURN:
2083 maybe_record_in_goto_queue (state, stmt);
2084 break;
2086 case GIMPLE_SWITCH:
2087 verify_norecord_switch_expr (state, as_a <gswitch *> (stmt));
2088 break;
2090 case GIMPLE_TRY:
2092 gtry *try_stmt = as_a <gtry *> (stmt);
2093 if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2094 replace = lower_try_finally (state, try_stmt);
2095 else
2097 x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt));
2098 if (!x)
2100 replace = gimple_try_eval (try_stmt);
2101 lower_eh_constructs_1 (state, &replace);
2103 else
2104 switch (gimple_code (x))
2106 case GIMPLE_CATCH:
2107 replace = lower_catch (state, try_stmt);
2108 break;
2109 case GIMPLE_EH_FILTER:
2110 replace = lower_eh_filter (state, try_stmt);
2111 break;
2112 case GIMPLE_EH_MUST_NOT_THROW:
2113 replace = lower_eh_must_not_throw (state, try_stmt);
2114 break;
2115 case GIMPLE_EH_ELSE:
2116 /* This code is only valid with GIMPLE_TRY_FINALLY. */
2117 gcc_unreachable ();
2118 default:
2119 replace = lower_cleanup (state, try_stmt);
2120 break;
2125 /* Remove the old stmt and insert the transformed sequence
2126 instead. */
2127 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
2128 gsi_remove (gsi, true);
2130 /* Return since we don't want gsi_next (). */
2131 return;
2133 case GIMPLE_EH_ELSE:
2134 /* We should be eliminating this in lower_try_finally et al. */
2135 gcc_unreachable ();
2137 default:
2138 /* A type, a decl, or some kind of statement that we're not
2139 interested in. Don't walk them. */
2140 break;
2143 gsi_next (gsi);
2146 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
2148 static void
2149 lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
2151 gimple_stmt_iterator gsi;
2152 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
2153 lower_eh_constructs_2 (state, &gsi);
2156 namespace {
2158 const pass_data pass_data_lower_eh =
2160 GIMPLE_PASS, /* type */
2161 "eh", /* name */
2162 OPTGROUP_NONE, /* optinfo_flags */
2163 TV_TREE_EH, /* tv_id */
2164 PROP_gimple_lcf, /* properties_required */
2165 PROP_gimple_leh, /* properties_provided */
2166 0, /* properties_destroyed */
2167 0, /* todo_flags_start */
2168 0, /* todo_flags_finish */
2171 class pass_lower_eh : public gimple_opt_pass
2173 public:
2174 pass_lower_eh (gcc::context *ctxt)
2175 : gimple_opt_pass (pass_data_lower_eh, ctxt)
2178 /* opt_pass methods: */
2179 virtual unsigned int execute (function *);
2181 }; // class pass_lower_eh
2183 unsigned int
2184 pass_lower_eh::execute (function *fun)
2186 struct leh_state null_state;
2187 gimple_seq bodyp;
2189 bodyp = gimple_body (current_function_decl);
2190 if (bodyp == NULL)
2191 return 0;
2193 finally_tree = new hash_table<finally_tree_hasher> (31);
2194 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
2195 memset (&null_state, 0, sizeof (null_state));
2197 collect_finally_tree_1 (bodyp, NULL);
2198 lower_eh_constructs_1 (&null_state, &bodyp);
2199 gimple_set_body (current_function_decl, bodyp);
2201 /* We assume there's a return statement, or something, at the end of
2202 the function, and thus plopping the EH sequence afterward won't
2203 change anything. */
2204 gcc_assert (!gimple_seq_may_fallthru (bodyp));
2205 gimple_seq_add_seq (&bodyp, eh_seq);
2207 /* We assume that since BODYP already existed, adding EH_SEQ to it
2208 didn't change its value, and we don't have to re-set the function. */
2209 gcc_assert (bodyp == gimple_body (current_function_decl));
2211 delete finally_tree;
2212 finally_tree = NULL;
2213 BITMAP_FREE (eh_region_may_contain_throw_map);
2214 eh_seq = NULL;
2216 /* If this function needs a language specific EH personality routine
2217 and the frontend didn't already set one do so now. */
2218 if (function_needs_eh_personality (fun) == eh_personality_lang
2219 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2220 DECL_FUNCTION_PERSONALITY (current_function_decl)
2221 = lang_hooks.eh_personality ();
2223 return 0;
2226 } // anon namespace
2228 gimple_opt_pass *
2229 make_pass_lower_eh (gcc::context *ctxt)
2231 return new pass_lower_eh (ctxt);
2234 /* Create the multiple edges from an EH_DISPATCH statement to all of
2235 the possible handlers for its EH region. Return true if there's
2236 no fallthru edge; false if there is. */
2238 bool
2239 make_eh_dispatch_edges (geh_dispatch *stmt)
2241 eh_region r;
2242 eh_catch c;
2243 basic_block src, dst;
2245 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2246 src = gimple_bb (stmt);
2248 switch (r->type)
2250 case ERT_TRY:
2251 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2253 dst = label_to_block (cfun, c->label);
2254 make_edge (src, dst, 0);
2256 /* A catch-all handler doesn't have a fallthru. */
2257 if (c->type_list == NULL)
2258 return false;
2260 break;
2262 case ERT_ALLOWED_EXCEPTIONS:
2263 dst = label_to_block (cfun, r->u.allowed.label);
2264 make_edge (src, dst, 0);
2265 break;
2267 default:
2268 gcc_unreachable ();
2271 return true;
2274 /* Create the single EH edge from STMT to its nearest landing pad,
2275 if there is such a landing pad within the current function. */
2277 void
2278 make_eh_edges (gimple *stmt)
2280 basic_block src, dst;
2281 eh_landing_pad lp;
2282 int lp_nr;
2284 lp_nr = lookup_stmt_eh_lp (stmt);
2285 if (lp_nr <= 0)
2286 return;
2288 lp = get_eh_landing_pad_from_number (lp_nr);
2289 gcc_assert (lp != NULL);
2291 src = gimple_bb (stmt);
2292 dst = label_to_block (cfun, lp->post_landing_pad);
2293 make_edge (src, dst, EDGE_EH);
2296 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2297 do not actually perform the final edge redirection.
2299 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2300 we intend to change the destination EH region as well; this means
2301 EH_LANDING_PAD_NR must already be set on the destination block label.
2302 If false, we're being called from generic cfg manipulation code and we
2303 should preserve our place within the region tree. */
2305 static void
2306 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2308 eh_landing_pad old_lp, new_lp;
2309 basic_block old_bb;
2310 gimple *throw_stmt;
2311 int old_lp_nr, new_lp_nr;
2312 tree old_label, new_label;
2313 edge_iterator ei;
2314 edge e;
2316 old_bb = edge_in->dest;
2317 old_label = gimple_block_label (old_bb);
2318 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2319 gcc_assert (old_lp_nr > 0);
2320 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2322 throw_stmt = last_stmt (edge_in->src);
2323 gcc_checking_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2325 new_label = gimple_block_label (new_bb);
2327 /* Look for an existing region that might be using NEW_BB already. */
2328 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2329 if (new_lp_nr)
2331 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2332 gcc_assert (new_lp);
2334 /* Unless CHANGE_REGION is true, the new and old landing pad
2335 had better be associated with the same EH region. */
2336 gcc_assert (change_region || new_lp->region == old_lp->region);
2338 else
2340 new_lp = NULL;
2341 gcc_assert (!change_region);
2344 /* Notice when we redirect the last EH edge away from OLD_BB. */
2345 FOR_EACH_EDGE (e, ei, old_bb->preds)
2346 if (e != edge_in && (e->flags & EDGE_EH))
2347 break;
2349 if (new_lp)
2351 /* NEW_LP already exists. If there are still edges into OLD_LP,
2352 there's nothing to do with the EH tree. If there are no more
2353 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2354 If CHANGE_REGION is true, then our caller is expecting to remove
2355 the landing pad. */
2356 if (e == NULL && !change_region)
2357 remove_eh_landing_pad (old_lp);
2359 else
2361 /* No correct landing pad exists. If there are no more edges
2362 into OLD_LP, then we can simply re-use the existing landing pad.
2363 Otherwise, we have to create a new landing pad. */
2364 if (e == NULL)
2366 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2367 new_lp = old_lp;
2369 else
2370 new_lp = gen_eh_landing_pad (old_lp->region);
2371 new_lp->post_landing_pad = new_label;
2372 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2375 /* Maybe move the throwing statement to the new region. */
2376 if (old_lp != new_lp)
2378 remove_stmt_from_eh_lp (throw_stmt);
2379 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2383 /* Redirect EH edge E to NEW_BB. */
2385 edge
2386 redirect_eh_edge (edge edge_in, basic_block new_bb)
2388 redirect_eh_edge_1 (edge_in, new_bb, false);
2389 return ssa_redirect_edge (edge_in, new_bb);
2392 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2393 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2394 The actual edge update will happen in the caller. */
2396 void
2397 redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
2399 tree new_lab = gimple_block_label (new_bb);
2400 bool any_changed = false;
2401 basic_block old_bb;
2402 eh_region r;
2403 eh_catch c;
2405 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2406 switch (r->type)
2408 case ERT_TRY:
2409 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2411 old_bb = label_to_block (cfun, c->label);
2412 if (old_bb == e->dest)
2414 c->label = new_lab;
2415 any_changed = true;
2418 break;
2420 case ERT_ALLOWED_EXCEPTIONS:
2421 old_bb = label_to_block (cfun, r->u.allowed.label);
2422 gcc_assert (old_bb == e->dest);
2423 r->u.allowed.label = new_lab;
2424 any_changed = true;
2425 break;
2427 default:
2428 gcc_unreachable ();
2431 gcc_assert (any_changed);
2434 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2436 bool
2437 operation_could_trap_helper_p (enum tree_code op,
2438 bool fp_operation,
2439 bool honor_trapv,
2440 bool honor_nans,
2441 bool honor_snans,
2442 tree divisor,
2443 bool *handled)
2445 *handled = true;
2446 switch (op)
2448 case TRUNC_DIV_EXPR:
2449 case CEIL_DIV_EXPR:
2450 case FLOOR_DIV_EXPR:
2451 case ROUND_DIV_EXPR:
2452 case EXACT_DIV_EXPR:
2453 case CEIL_MOD_EXPR:
2454 case FLOOR_MOD_EXPR:
2455 case ROUND_MOD_EXPR:
2456 case TRUNC_MOD_EXPR:
2457 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2458 return true;
2459 if (TREE_CODE (divisor) == VECTOR_CST)
2461 /* Inspired by initializer_each_zero_or_onep. */
2462 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (divisor);
2463 if (VECTOR_CST_STEPPED_P (divisor)
2464 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (divisor))
2465 .is_constant (&nelts))
2466 return true;
2467 for (unsigned int i = 0; i < nelts; ++i)
2469 tree elt = vector_cst_elt (divisor, i);
2470 if (integer_zerop (elt))
2471 return true;
2474 return false;
2476 case RDIV_EXPR:
2477 if (honor_snans)
2478 return true;
2479 gcc_assert (fp_operation);
2480 return flag_trapping_math;
2482 case LT_EXPR:
2483 case LE_EXPR:
2484 case GT_EXPR:
2485 case GE_EXPR:
2486 case LTGT_EXPR:
2487 /* Some floating point comparisons may trap. */
2488 return honor_nans;
2490 case EQ_EXPR:
2491 case NE_EXPR:
2492 case UNORDERED_EXPR:
2493 case ORDERED_EXPR:
2494 case UNLT_EXPR:
2495 case UNLE_EXPR:
2496 case UNGT_EXPR:
2497 case UNGE_EXPR:
2498 case UNEQ_EXPR:
2499 return honor_snans;
2501 case NEGATE_EXPR:
2502 case ABS_EXPR:
2503 case CONJ_EXPR:
2504 /* These operations don't trap with floating point. */
2505 if (honor_trapv)
2506 return true;
2507 return false;
2509 case ABSU_EXPR:
2510 /* ABSU_EXPR never traps. */
2511 return false;
2513 case PLUS_EXPR:
2514 case MINUS_EXPR:
2515 case MULT_EXPR:
2516 /* Any floating arithmetic may trap. */
2517 if (fp_operation && flag_trapping_math)
2518 return true;
2519 if (honor_trapv)
2520 return true;
2521 return false;
2523 case COMPLEX_EXPR:
2524 case CONSTRUCTOR:
2525 /* Constructing an object cannot trap. */
2526 return false;
2528 case COND_EXPR:
2529 case VEC_COND_EXPR:
2530 /* Whether *COND_EXPR can trap depends on whether the
2531 first argument can trap, so signal it as not handled.
2532 Whether lhs is floating or not doesn't matter. */
2533 *handled = false;
2534 return false;
2536 default:
2537 /* Any floating arithmetic may trap. */
2538 if (fp_operation && flag_trapping_math)
2539 return true;
2541 *handled = false;
2542 return false;
2546 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2547 on floating-point values. HONOR_TRAPV is true if OP is applied on integer
2548 type operands that may trap. If OP is a division operator, DIVISOR contains
2549 the value of the divisor. */
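/* Usage sketch, for illustration:

     operation_could_trap_p (TRUNC_DIV_EXPR, false, false, integer_zero_node)

   is true (division by zero), while

     operation_could_trap_p (PLUS_EXPR, false, false, NULL_TREE)

   is false: integer addition only traps when HONOR_TRAPV is set for
   -ftrapv-style trapping overflow. */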
2551 bool
2552 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2553 tree divisor)
2555 bool honor_nans = (fp_operation && flag_trapping_math
2556 && !flag_finite_math_only);
2557 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2558 bool handled;
2560 /* This function cannot tell whether or not COND_EXPR could trap,
2561 because that depends on its condition op. */
2562 gcc_assert (op != COND_EXPR);
2564 if (TREE_CODE_CLASS (op) != tcc_comparison
2565 && TREE_CODE_CLASS (op) != tcc_unary
2566 && TREE_CODE_CLASS (op) != tcc_binary)
2567 return false;
2569 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2570 honor_nans, honor_snans, divisor,
2571 &handled);
2575 /* Returns true if it is possible to prove that the index of
2576 an array access REF (an ARRAY_REF expression) falls into the
2577 array bounds. */
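/* For instance (illustrative): with int a[10], the reference a[3] has
   a constant index inside [0, 9] and is provably in bounds; a[i] with
   a non-constant i, or a[42], is not provably in bounds and yields
   false. */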
2579 static bool
2580 in_array_bounds_p (tree ref)
2582 tree idx = TREE_OPERAND (ref, 1);
2583 tree min, max;
2585 if (TREE_CODE (idx) != INTEGER_CST)
2586 return false;
2588 min = array_ref_low_bound (ref);
2589 max = array_ref_up_bound (ref);
2590 if (!min
2591 || !max
2592 || TREE_CODE (min) != INTEGER_CST
2593 || TREE_CODE (max) != INTEGER_CST)
2594 return false;
2596 if (tree_int_cst_lt (idx, min)
2597 || tree_int_cst_lt (max, idx))
2598 return false;
2600 return true;
2603 /* Returns true if it is possible to prove that the range of
2604 an array access REF (an ARRAY_RANGE_REF expression) falls
2605 into the array bounds. */
2607 static bool
2608 range_in_array_bounds_p (tree ref)
2610 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
2611 tree range_min, range_max, min, max;
2613 range_min = TYPE_MIN_VALUE (domain_type);
2614 range_max = TYPE_MAX_VALUE (domain_type);
2615 if (!range_min
2616 || !range_max
2617 || TREE_CODE (range_min) != INTEGER_CST
2618 || TREE_CODE (range_max) != INTEGER_CST)
2619 return false;
2621 min = array_ref_low_bound (ref);
2622 max = array_ref_up_bound (ref);
2623 if (!min
2624 || !max
2625 || TREE_CODE (min) != INTEGER_CST
2626 || TREE_CODE (max) != INTEGER_CST)
2627 return false;
2629 if (tree_int_cst_lt (range_min, min)
2630 || tree_int_cst_lt (max, range_max))
2631 return false;
2633 return true;
2636 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2637 location or floating point arithmetic. Cf. the rtl version, may_trap_p.
2638 This routine expects only GIMPLE lhs or rhs input. */
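/* Illustrative examples: *p for an arbitrary pointer could trap, as
   could a[i] with unprovable bounds or x / y with y not known to be
   nonzero; a reference marked TREE_THIS_NOTRAP, or a CONSTRUCTOR,
   never does. */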
2640 bool
2641 tree_could_trap_p (tree expr)
2643 enum tree_code code;
2644 bool fp_operation = false;
2645 bool honor_trapv = false;
2646 tree t, base, div = NULL_TREE;
2648 if (!expr)
2649 return false;
2651 /* In COND_EXPR and VEC_COND_EXPR only the condition may trap, but
2652 they won't appear as operands in GIMPLE form, so this is just for the
2653 GENERIC uses where it needs to recurse on the operands and so
2654 *COND_EXPR itself doesn't trap. */
2655 if (TREE_CODE (expr) == COND_EXPR || TREE_CODE (expr) == VEC_COND_EXPR)
2656 return false;
2658 code = TREE_CODE (expr);
2659 t = TREE_TYPE (expr);
2661 if (t)
2663 if (COMPARISON_CLASS_P (expr))
2664 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2665 else
2666 fp_operation = FLOAT_TYPE_P (t);
2667 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2670 if (TREE_CODE_CLASS (code) == tcc_binary)
2671 div = TREE_OPERAND (expr, 1);
2672 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2673 return true;
2675 restart:
2676 switch (code)
2678 case COMPONENT_REF:
2679 case REALPART_EXPR:
2680 case IMAGPART_EXPR:
2681 case BIT_FIELD_REF:
2682 case VIEW_CONVERT_EXPR:
2683 case WITH_SIZE_EXPR:
2684 expr = TREE_OPERAND (expr, 0);
2685 code = TREE_CODE (expr);
2686 goto restart;
2688 case ARRAY_RANGE_REF:
2689 base = TREE_OPERAND (expr, 0);
2690 if (tree_could_trap_p (base))
2691 return true;
2692 if (TREE_THIS_NOTRAP (expr))
2693 return false;
2694 return !range_in_array_bounds_p (expr);
2696 case ARRAY_REF:
2697 base = TREE_OPERAND (expr, 0);
2698 if (tree_could_trap_p (base))
2699 return true;
2700 if (TREE_THIS_NOTRAP (expr))
2701 return false;
2702 return !in_array_bounds_p (expr);
2704 case TARGET_MEM_REF:
2705 case MEM_REF:
2706 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
2707 && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
2708 return true;
2709 if (TREE_THIS_NOTRAP (expr))
2710 return false;
2711 /* We cannot prove that the access is in-bounds when we have
2712 variable-index TARGET_MEM_REFs. */
2713 if (code == TARGET_MEM_REF
2714 && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
2715 return true;
2716 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2718 tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2719 poly_offset_int off = mem_ref_offset (expr);
2720 if (maybe_lt (off, 0))
2721 return true;
2722 if (TREE_CODE (base) == STRING_CST)
2723 return maybe_le (TREE_STRING_LENGTH (base), off);
2724 tree size = DECL_SIZE_UNIT (base);
2725 if (size == NULL_TREE
2726 || !poly_int_tree_p (size)
2727 || maybe_le (wi::to_poly_offset (size), off))
2728 return true;
2729 /* Now we are sure the first byte of the access is inside
2730 the object. */
2731 return false;
2733 return true;
2735 case INDIRECT_REF:
2736 return !TREE_THIS_NOTRAP (expr);
2738 case ASM_EXPR:
2739 return TREE_THIS_VOLATILE (expr);
2741 case CALL_EXPR:
2742 /* Internal function calls do not trap. */
2743 if (CALL_EXPR_FN (expr) == NULL_TREE)
2744 return false;
2745 t = get_callee_fndecl (expr);
2746 /* Assume that indirect and calls to weak functions may trap. */
2747 if (!t || !DECL_P (t))
2748 return true;
2749 if (DECL_WEAK (t))
2750 return tree_could_trap_p (t);
2751 return false;
2753 case FUNCTION_DECL:
2754 /* Assume that accesses to weak functions may trap, unless we know
2755 they are certainly defined in current TU or in some other
2756 LTO partition. */
2757 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
2759 cgraph_node *node = cgraph_node::get (expr);
2760 if (node)
2761 node = node->function_symbol ();
2762 return !(node && node->in_other_partition);
2764 return false;
2766 case VAR_DECL:
2767 /* Assume that accesses to weak vars may trap, unless we know
2768 they are certainly defined in current TU or in some other
2769 LTO partition. */
2770 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
2772 varpool_node *node = varpool_node::get (expr);
2773 if (node)
2774 node = node->ultimate_alias_target ();
2775 return !(node && node->in_other_partition);
2777 return false;
2779 default:
2780 return false;
2784 /* Return non-NULL if there is an integer operation with trapping overflow
2785 we can rewrite into non-trapping. Called via walk_tree from
2786 rewrite_to_non_trapping_overflow. */
2788 static tree
2789 find_trapping_overflow (tree *tp, int *walk_subtrees, void *data)
2791 if (EXPR_P (*tp)
2792 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (*tp))
2793 && !operation_no_trapping_overflow (TREE_TYPE (*tp), TREE_CODE (*tp)))
2794 return *tp;
2795 if (IS_TYPE_OR_DECL_P (*tp)
2796 || (TREE_CODE (*tp) == SAVE_EXPR && data == NULL))
2797 *walk_subtrees = 0;
2798 return NULL_TREE;
2801 /* Rewrite selected operations into unsigned arithmetics, so that they
2802 don't trap on overflow. */
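/* Sketch of the effect, assuming -ftrapv and int operands:

     a + b          becomes   (int) ((unsigned int) a + (unsigned int) b)
     ABS_EXPR <a>   becomes   (int) ABSU_EXPR <a>

   i.e. the operation is done in the corresponding unsigned type, where
   overflow wraps instead of trapping, and converted back afterwards. */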
2804 static tree
2805 replace_trapping_overflow (tree *tp, int *walk_subtrees, void *data)
2807 if (find_trapping_overflow (tp, walk_subtrees, data))
2809 tree type = TREE_TYPE (*tp);
2810 tree utype = unsigned_type_for (type);
2811 *walk_subtrees = 0;
2812 int len = TREE_OPERAND_LENGTH (*tp);
2813 for (int i = 0; i < len; ++i)
2814 walk_tree (&TREE_OPERAND (*tp, i), replace_trapping_overflow,
2815 data, (hash_set<tree> *) data);
2817 if (TREE_CODE (*tp) == ABS_EXPR)
2819 TREE_SET_CODE (*tp, ABSU_EXPR);
2820 TREE_TYPE (*tp) = utype;
2821 *tp = fold_convert (type, *tp);
2823 else
2825 TREE_TYPE (*tp) = utype;
2826 len = TREE_OPERAND_LENGTH (*tp);
2827 for (int i = 0; i < len; ++i)
2828 TREE_OPERAND (*tp, i)
2829 = fold_convert (utype, TREE_OPERAND (*tp, i));
2830 *tp = fold_convert (type, *tp);
2833 return NULL_TREE;
2836 /* If any subexpression of EXPR can trap due to -ftrapv, rewrite it
2837 using unsigned arithmetics to avoid traps in it. */
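/* A typical (illustrative) use by a caller that wants to hoist or
   duplicate EXPR without introducing -ftrapv traps:

     expr = rewrite_to_non_trapping_overflow (expr);

   The result is either the original tree (nothing can trap, or
   !flag_trapv) or an unshared copy rewritten as above. */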
2839 tree
2840 rewrite_to_non_trapping_overflow (tree expr)
2842 if (!flag_trapv)
2843 return expr;
2844 hash_set<tree> pset;
2845 if (!walk_tree (&expr, find_trapping_overflow, &pset, &pset))
2846 return expr;
2847 expr = unshare_expr (expr);
2848 pset.empty ();
2849 walk_tree (&expr, replace_trapping_overflow, &pset, &pset);
2850 return expr;
2853 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2854 an assignment or a conditional) may throw. */
2856 static bool
2857 stmt_could_throw_1_p (gassign *stmt)
2859 enum tree_code code = gimple_assign_rhs_code (stmt);
2860 bool honor_nans = false;
2861 bool honor_snans = false;
2862 bool fp_operation = false;
2863 bool honor_trapv = false;
2864 tree t;
2865 size_t i;
2866 bool handled, ret;
2868 if (TREE_CODE_CLASS (code) == tcc_comparison
2869 || TREE_CODE_CLASS (code) == tcc_unary
2870 || TREE_CODE_CLASS (code) == tcc_binary)
2872 if (TREE_CODE_CLASS (code) == tcc_comparison)
2873 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2874 else
2875 t = TREE_TYPE (gimple_assign_lhs (stmt));
2876 fp_operation = FLOAT_TYPE_P (t);
2877 if (fp_operation)
2879 honor_nans = flag_trapping_math && !flag_finite_math_only;
2880 honor_snans = flag_signaling_nans != 0;
2882 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2883 honor_trapv = true;
2886 /* First check the LHS. */
2887 if (tree_could_trap_p (gimple_assign_lhs (stmt)))
2888 return true;
2890 /* Check if the main expression may trap. */
2891 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2892 honor_nans, honor_snans,
2893 gimple_assign_rhs2 (stmt),
2894 &handled);
2895 if (handled)
2896 return ret;
2898 /* If the expression does not trap, see if any of the individual operands may
2899 trap. */
2900 for (i = 1; i < gimple_num_ops (stmt); i++)
2901 if (tree_could_trap_p (gimple_op (stmt, i)))
2902 return true;
2904 return false;
2908 /* Return true if statement STMT within FUN could throw an exception. */
2910 bool
2911 stmt_could_throw_p (function *fun, gimple *stmt)
2913 if (!flag_exceptions)
2914 return false;
2916 /* The only statements that can throw an exception are assignments,
2917 conditionals, calls, resx, and asms. */
2918 switch (gimple_code (stmt))
2920 case GIMPLE_RESX:
2921 return true;
2923 case GIMPLE_CALL:
2924 return !gimple_call_nothrow_p (as_a <gcall *> (stmt));
2926 case GIMPLE_COND:
2928 if (fun && !fun->can_throw_non_call_exceptions)
2929 return false;
2930 gcond *cond = as_a <gcond *> (stmt);
2931 tree lhs = gimple_cond_lhs (cond);
2932 return operation_could_trap_p (gimple_cond_code (cond),
2933 FLOAT_TYPE_P (TREE_TYPE (lhs)),
2934 false, NULL_TREE);
2937 case GIMPLE_ASSIGN:
2938 if ((fun && !fun->can_throw_non_call_exceptions)
2939 || gimple_clobber_p (stmt))
2940 return false;
2941 return stmt_could_throw_1_p (as_a <gassign *> (stmt));
2943 case GIMPLE_ASM:
2944 if (fun && !fun->can_throw_non_call_exceptions)
2945 return false;
2946 return gimple_asm_volatile_p (as_a <gasm *> (stmt));
2948 default:
2949 return false;
2953 /* Return true if STMT in function FUN must be assumed necessary because of
2954 non-call exceptions. */
2956 bool
2957 stmt_unremovable_because_of_non_call_eh_p (function *fun, gimple *stmt)
2959 return (fun->can_throw_non_call_exceptions
2960 && !fun->can_delete_dead_exceptions
2961 && stmt_could_throw_p (fun, stmt));
2964 /* Return true if expression T could throw an exception. */
2966 bool
2967 tree_could_throw_p (tree t)
2969 if (!flag_exceptions)
2970 return false;
2971 if (TREE_CODE (t) == MODIFY_EXPR)
2973 if (cfun->can_throw_non_call_exceptions
2974 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2975 return true;
2976 t = TREE_OPERAND (t, 1);
2979 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2980 t = TREE_OPERAND (t, 0);
2981 if (TREE_CODE (t) == CALL_EXPR)
2982 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2983 if (cfun->can_throw_non_call_exceptions)
2984 return tree_could_trap_p (t);
2985 return false;
2988 /* Return true if STMT can throw an exception that is not caught within its
2989 function FUN. FUN can be NULL, in which case the answer is extra
2990 conservative. */
2992 bool
2993 stmt_can_throw_external (function *fun, gimple *stmt)
2995 int lp_nr;
2997 if (!stmt_could_throw_p (fun, stmt))
2998 return false;
2999 if (!fun)
3000 return true;
3002 lp_nr = lookup_stmt_eh_lp_fn (fun, stmt);
3003 return lp_nr == 0;
3006 /* Return true if STMT can throw an exception that is caught within its
3007 function FUN. */
3009 bool
3010 stmt_can_throw_internal (function *fun, gimple *stmt)
3012 int lp_nr;
3014 gcc_checking_assert (fun);
3015 if (!stmt_could_throw_p (fun, stmt))
3016 return false;
3018 lp_nr = lookup_stmt_eh_lp_fn (fun, stmt);
3019 return lp_nr > 0;
3022 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
3023 remove any entry it might have from the EH table. Return true if
3024 any change was made. */
3026 bool
3027 maybe_clean_eh_stmt_fn (struct function *ifun, gimple *stmt)
3029 if (stmt_could_throw_p (ifun, stmt))
3030 return false;
3031 return remove_stmt_from_eh_lp_fn (ifun, stmt);
3034 /* Likewise, but always use the current function. */
3036 bool
3037 maybe_clean_eh_stmt (gimple *stmt)
3039 return maybe_clean_eh_stmt_fn (cfun, stmt);
3042 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
3043 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
3044 in the table if it should be in there. Return TRUE if a replacement was
3045 done that may require an EH edge purge. */
3047 bool
3048 maybe_clean_or_replace_eh_stmt (gimple *old_stmt, gimple *new_stmt)
3050 int lp_nr = lookup_stmt_eh_lp (old_stmt);
3052 if (lp_nr != 0)
3054 bool new_stmt_could_throw = stmt_could_throw_p (cfun, new_stmt);
3056 if (new_stmt == old_stmt && new_stmt_could_throw)
3057 return false;
3059 remove_stmt_from_eh_lp (old_stmt);
3060 if (new_stmt_could_throw)
3062 add_stmt_to_eh_lp (new_stmt, lp_nr);
3063 return false;
3065 else
3066 return true;
3069 return false;
3072 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
3073 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
3074 operand is the return value of duplicate_eh_regions. */
3076 bool
3077 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple *new_stmt,
3078 struct function *old_fun, gimple *old_stmt,
3079 hash_map<void *, void *> *map,
3080 int default_lp_nr)
3082 int old_lp_nr, new_lp_nr;
3084 if (!stmt_could_throw_p (new_fun, new_stmt))
3085 return false;
3087 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
3088 if (old_lp_nr == 0)
3090 if (default_lp_nr == 0)
3091 return false;
3092 new_lp_nr = default_lp_nr;
3094 else if (old_lp_nr > 0)
3096 eh_landing_pad old_lp, new_lp;
3098 old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
3099 new_lp = static_cast<eh_landing_pad> (*map->get (old_lp));
3100 new_lp_nr = new_lp->index;
3102 else
3104 eh_region old_r, new_r;
3106 old_r = (*old_fun->eh->region_array)[-old_lp_nr];
3107 new_r = static_cast<eh_region> (*map->get (old_r));
3108 new_lp_nr = -new_r->index;
3111 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
3112 return true;
3115 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
3116 and thus no remapping is required. */
3118 bool
3119 maybe_duplicate_eh_stmt (gimple *new_stmt, gimple *old_stmt)
3121 int lp_nr;
3123 if (!stmt_could_throw_p (cfun, new_stmt))
3124 return false;
3126 lp_nr = lookup_stmt_eh_lp (old_stmt);
3127 if (lp_nr == 0)
3128 return false;
3130 add_stmt_to_eh_lp (new_stmt, lp_nr);
3131 return true;
3134 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
3135 GIMPLE_TRY) that are similar enough to be considered the same. Currently
3136 this only handles handlers consisting of a single call, as that's the
3137 important case for C++: a destructor call for a particular object showing
3138 up in multiple handlers. */
3140 static bool
3141 same_handler_p (gimple_seq oneh, gimple_seq twoh)
3143 gimple_stmt_iterator gsi;
3144 gimple *ones, *twos;
3145 unsigned int ai;
3147 gsi = gsi_start (oneh);
3148 if (!gsi_one_before_end_p (gsi))
3149 return false;
3150 ones = gsi_stmt (gsi);
3152 gsi = gsi_start (twoh);
3153 if (!gsi_one_before_end_p (gsi))
3154 return false;
3155 twos = gsi_stmt (gsi);
3157 if (!is_gimple_call (ones)
3158 || !is_gimple_call (twos)
3159 || gimple_call_lhs (ones)
3160 || gimple_call_lhs (twos)
3161 || gimple_call_chain (ones)
3162 || gimple_call_chain (twos)
3163 || !gimple_call_same_target_p (ones, twos)
3164 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
3165 return false;
3167 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
3168 if (!operand_equal_p (gimple_call_arg (ones, ai),
3169 gimple_call_arg (twos, ai), 0))
3170 return false;
3172 return true;
3175 /* Optimize
3176 try { A() } finally { try { ~B() } catch { ~A() } }
3177 try { ... } finally { ~A() }
3178 into
3179 try { A() } catch { ~B() }
3180 try { ~B() ... } finally { ~A() }
3182 This occurs frequently in C++, where A is a local variable and B is a
3183 temporary used in the initializer for A. */
3185 static void
3186 optimize_double_finally (gtry *one, gtry *two)
3188 gimple *oneh;
3189 gimple_stmt_iterator gsi;
3190 gimple_seq cleanup;
3192 cleanup = gimple_try_cleanup (one);
3193 gsi = gsi_start (cleanup);
3194 if (!gsi_one_before_end_p (gsi))
3195 return;
3197 oneh = gsi_stmt (gsi);
3198 if (gimple_code (oneh) != GIMPLE_TRY
3199 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
3200 return;
3202 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
3204 gimple_seq seq = gimple_try_eval (oneh);
3206 gimple_try_set_cleanup (one, seq);
3207 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
3208 seq = copy_gimple_seq_and_replace_locals (seq);
3209 gimple_seq_add_seq (&seq, gimple_try_eval (two));
3210 gimple_try_set_eval (two, seq);
3214 /* Perform EH refactoring optimizations that are simpler to do when code
3215 flow has been lowered but EH structures haven't. */
3217 static void
3218 refactor_eh_r (gimple_seq seq)
3220 gimple_stmt_iterator gsi;
3221 gimple *one, *two;
3223 one = NULL;
3224 two = NULL;
3225 gsi = gsi_start (seq);
3226 while (1)
3228 one = two;
3229 if (gsi_end_p (gsi))
3230 two = NULL;
3231 else
3232 two = gsi_stmt (gsi);
3233 if (one && two)
3234 if (gtry *try_one = dyn_cast <gtry *> (one))
3235 if (gtry *try_two = dyn_cast <gtry *> (two))
3236 if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
3237 && gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
3238 optimize_double_finally (try_one, try_two);
3239 if (one)
3240 switch (gimple_code (one))
3242 case GIMPLE_TRY:
3243 refactor_eh_r (gimple_try_eval (one));
3244 refactor_eh_r (gimple_try_cleanup (one));
3245 break;
3246 case GIMPLE_CATCH:
3247 refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
3248 break;
3249 case GIMPLE_EH_FILTER:
3250 refactor_eh_r (gimple_eh_filter_failure (one));
3251 break;
3252 case GIMPLE_EH_ELSE:
3254 geh_else *eh_else_stmt = as_a <geh_else *> (one);
3255 refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
3256 refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
3258 break;
3259 default:
3260 break;
3262 if (two)
3263 gsi_next (&gsi);
3264 else
3265 break;
3269 namespace {
3271 const pass_data pass_data_refactor_eh =
3273 GIMPLE_PASS, /* type */
3274 "ehopt", /* name */
3275 OPTGROUP_NONE, /* optinfo_flags */
3276 TV_TREE_EH, /* tv_id */
3277 PROP_gimple_lcf, /* properties_required */
3278 0, /* properties_provided */
3279 0, /* properties_destroyed */
3280 0, /* todo_flags_start */
3281 0, /* todo_flags_finish */
3284 class pass_refactor_eh : public gimple_opt_pass
3286 public:
3287 pass_refactor_eh (gcc::context *ctxt)
3288 : gimple_opt_pass (pass_data_refactor_eh, ctxt)
3291 /* opt_pass methods: */
3292 virtual bool gate (function *) { return flag_exceptions != 0; }
3293 virtual unsigned int execute (function *)
3295 refactor_eh_r (gimple_body (current_function_decl));
3296 return 0;
3299 }; // class pass_refactor_eh
3301 } // anon namespace
3303 gimple_opt_pass *
3304 make_pass_refactor_eh (gcc::context *ctxt)
3306 return new pass_refactor_eh (ctxt);
3309 /* At the end of gimple optimization, we can lower RESX. */
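/* Roughly (an illustrative sketch), a GIMPLE_RESX in region R whose
   exception escapes the function becomes

     _1 = __builtin_eh_pointer (R);
     __builtin_unwind_resume (_1);

   (or a __cxa_end_cleanup call on ARM EABI), while a resx transferring
   to an outer region of the same function becomes a
   __builtin_eh_copy_values call and a plain fallthru edge. */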
3311 static bool
3312 lower_resx (basic_block bb, gresx *stmt,
3313 hash_map<eh_region, tree> *mnt_map)
3315 int lp_nr;
3316 eh_region src_r, dst_r;
3317 gimple_stmt_iterator gsi;
3318 gimple *x;
3319 tree fn, src_nr;
3320 bool ret = false;
3322 lp_nr = lookup_stmt_eh_lp (stmt);
3323 if (lp_nr != 0)
3324 dst_r = get_eh_region_from_lp_number (lp_nr);
3325 else
3326 dst_r = NULL;
3328 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
3329 gsi = gsi_last_bb (bb);
3331 if (src_r == NULL)
3333 /* We can wind up with no source region when pass_cleanup_eh shows
3334 that there are no entries into an eh region and deletes it, but
3335 then the block that contains the resx isn't removed. This can
3336 happen without optimization when the switch statement created by
3337 lower_try_finally_switch isn't simplified to remove the eh case.
3339 Resolve this by expanding the resx node to an abort. */
3341 fn = builtin_decl_implicit (BUILT_IN_TRAP);
3342 x = gimple_build_call (fn, 0);
3343 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3345 while (EDGE_COUNT (bb->succs) > 0)
3346 remove_edge (EDGE_SUCC (bb, 0));
3348 else if (dst_r)
3350 /* When we have a destination region, we resolve this by copying
3351 the excptr and filter values into place, and changing the edge
3352 to immediately after the landing pad. */
3353 edge e;
3355 if (lp_nr < 0)
3357 basic_block new_bb;
3358 tree lab;
3360 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
3361 the failure decl into a new block, if needed. */
3362 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
3364 tree *slot = mnt_map->get (dst_r);
3365 if (slot == NULL)
3367 gimple_stmt_iterator gsi2;
3369 new_bb = create_empty_bb (bb);
3370 new_bb->count = bb->count;
3371 add_bb_to_loop (new_bb, bb->loop_father);
3372 lab = gimple_block_label (new_bb);
3373 gsi2 = gsi_start_bb (new_bb);
3375 fn = dst_r->u.must_not_throw.failure_decl;
3376 x = gimple_build_call (fn, 0);
3377 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3378 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3380 mnt_map->put (dst_r, lab);
3382 else
3384 lab = *slot;
3385 new_bb = label_to_block (cfun, lab);
3388 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3389 e = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
3391 else
3393 edge_iterator ei;
3394 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
3396 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
3397 src_nr = build_int_cst (integer_type_node, src_r->index);
3398 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3399 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3401 /* Update the flags for the outgoing edge. */
3402 e = single_succ_edge (bb);
3403 gcc_assert (e->flags & EDGE_EH);
3404 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3405 e->probability = profile_probability::always ();
3407 /* If there are no more EH users of the landing pad, delete it. */
3408 FOR_EACH_EDGE (e, ei, e->dest->preds)
3409 if (e->flags & EDGE_EH)
3410 break;
3411 if (e == NULL)
3413 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3414 remove_eh_landing_pad (lp);
3418 ret = true;
3420 else
3422 tree var;
3424 /* When we don't have a destination region, this exception escapes
3425 up the call chain. We resolve this by generating a call to the
3426 _Unwind_Resume library function. */
3428 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3429 with no arguments for C++. Check for that. */
3430 if (src_r->use_cxa_end_cleanup)
3432 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3433 x = gimple_build_call (fn, 0);
3434 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3436 else
3438 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3439 src_nr = build_int_cst (integer_type_node, src_r->index);
3440 x = gimple_build_call (fn, 1, src_nr);
3441 var = create_tmp_var (ptr_type_node);
3442 var = make_ssa_name (var, x);
3443 gimple_call_set_lhs (x, var);
3444 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3446 /* When exception handling is delegated to a caller function, we
3447 have to guarantee that shadow memory variables living on the stack
3448 will be cleaned before control is given to a parent function. */
3449 if (sanitize_flags_p (SANITIZE_ADDRESS))
3451 tree decl
3452 = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
3453 gimple *g = gimple_build_call (decl, 0);
3454 gimple_set_location (g, gimple_location (stmt));
3455 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
3458 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3459 x = gimple_build_call (fn, 1, var);
3460 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3463 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3466 gsi_remove (&gsi, true);
3468 return ret;
3471 namespace {
3473 const pass_data pass_data_lower_resx =
3475 GIMPLE_PASS, /* type */
3476 "resx", /* name */
3477 OPTGROUP_NONE, /* optinfo_flags */
3478 TV_TREE_EH, /* tv_id */
3479 PROP_gimple_lcf, /* properties_required */
3480 0, /* properties_provided */
3481 0, /* properties_destroyed */
3482 0, /* todo_flags_start */
3483 0, /* todo_flags_finish */
3486 class pass_lower_resx : public gimple_opt_pass
3488 public:
3489 pass_lower_resx (gcc::context *ctxt)
3490 : gimple_opt_pass (pass_data_lower_resx, ctxt)
3493 /* opt_pass methods: */
3494 virtual bool gate (function *) { return flag_exceptions != 0; }
3495 virtual unsigned int execute (function *);
3497 }; // class pass_lower_resx
3499 unsigned
3500 pass_lower_resx::execute (function *fun)
3502 basic_block bb;
3503 bool dominance_invalidated = false;
3504 bool any_rewritten = false;
3506 hash_map<eh_region, tree> mnt_map;
3508 FOR_EACH_BB_FN (bb, fun)
3510 gimple *last = last_stmt (bb);
3511 if (last && is_gimple_resx (last))
3513 dominance_invalidated |=
3514 lower_resx (bb, as_a <gresx *> (last), &mnt_map);
3515 any_rewritten = true;
3519 if (dominance_invalidated)
3521 free_dominance_info (CDI_DOMINATORS);
3522 free_dominance_info (CDI_POST_DOMINATORS);
3525 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3528 } // anon namespace
3530 gimple_opt_pass *
3531 make_pass_lower_resx (gcc::context *ctxt)
3533 return new pass_lower_resx (ctxt);
3536 /* Try to optimize var = {v} {CLOBBER} stmts followed just by
3537 external throw. */
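/* For example (hypothetical GIMPLE):

     <L5>:
     obj ={v} {CLOBBER};
     resx 2;  <-- the exception escapes the function

   The clobbers convey nothing to a caller, so they are deleted. */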
3539 static void
3540 optimize_clobbers (basic_block bb)
3542 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3543 bool any_clobbers = false;
3544 bool seen_stack_restore = false;
3545 edge_iterator ei;
3546 edge e;
3548 /* Only optimize anything if the bb contains at least one clobber,
3549 ends with resx (checked by caller), optionally contains some
3550 debug stmts or labels, or at most one __builtin_stack_restore
3551 call, and has an incoming EH edge. */
3552 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3554 gimple *stmt = gsi_stmt (gsi);
3555 if (is_gimple_debug (stmt))
3556 continue;
3557 if (gimple_clobber_p (stmt))
3559 any_clobbers = true;
3560 continue;
3562 if (!seen_stack_restore
3563 && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
3565 seen_stack_restore = true;
3566 continue;
3568 if (gimple_code (stmt) == GIMPLE_LABEL)
3569 break;
3570 return;
3572 if (!any_clobbers)
3573 return;
3574 FOR_EACH_EDGE (e, ei, bb->preds)
3575 if (e->flags & EDGE_EH)
3576 break;
3577 if (e == NULL)
3578 return;
3579 gsi = gsi_last_bb (bb);
3580 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3582 gimple *stmt = gsi_stmt (gsi);
3583 if (!gimple_clobber_p (stmt))
3584 continue;
3585 unlink_stmt_vdef (stmt);
3586 gsi_remove (&gsi, true);
3587 release_defs (stmt);
3591 /* Try to sink var = {v} {CLOBBER} stmts followed just by
3592 internal throw to successor BB.
3593 SUNK, if not NULL, is an array of sequences indexed by basic-block
3594 index to sink to and to pick up sinking opportunities from.
3595 If FOUND_OPPORTUNITY is not NULL then do not perform the optimization
3596 but set *FOUND_OPPORTUNITY to true. */
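/* Sketch, for illustration: when BB ends in a resx whose exception is
   caught in this function and otherwise contains only

     obj ={v} {CLOBBER};

   stmts, those clobbers are moved to the head of the single EH
   successor, leaving BB trivial and keeping the storage marked dead
   along the unwind path. */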
3598 static int
3599 sink_clobbers (basic_block bb,
3600 gimple_seq *sunk = NULL, bool *found_opportunity = NULL)
3602 edge e;
3603 edge_iterator ei;
3604 gimple_stmt_iterator gsi, dgsi;
3605 basic_block succbb;
3606 bool any_clobbers = false;
3607 unsigned todo = 0;
3609 /* Only optimize if BB has a single EH successor and
3610 all predecessor edges are EH too. */
3611 if (!single_succ_p (bb)
3612 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3613 return 0;
3615 FOR_EACH_EDGE (e, ei, bb->preds)
3617 if ((e->flags & EDGE_EH) == 0)
3618 return 0;
3621 /* And BB contains only CLOBBER stmts before the final
3622 RESX. */
3623 gsi = gsi_last_bb (bb);
3624 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3626 gimple *stmt = gsi_stmt (gsi);
3627 if (is_gimple_debug (stmt))
3628 continue;
3629 if (gimple_code (stmt) == GIMPLE_LABEL)
3630 break;
3631 if (!gimple_clobber_p (stmt))
3632 return 0;
3633 any_clobbers = true;
3635 if (!any_clobbers && (!sunk || gimple_seq_empty_p (sunk[bb->index])))
3636 return 0;
3638 /* If this was a dry run, tell it we found clobbers to sink. */
3639 if (found_opportunity)
3641 *found_opportunity = true;
3642 return 0;
3645 edge succe = single_succ_edge (bb);
3646 succbb = succe->dest;
3648 /* See if there is a virtual PHI node to take an updated virtual
3649 operand from. */
3650 gphi *vphi = NULL;
3651 for (gphi_iterator gpi = gsi_start_phis (succbb);
3652 !gsi_end_p (gpi); gsi_next (&gpi))
3654 tree res = gimple_phi_result (gpi.phi ());
3655 if (virtual_operand_p (res))
3657 vphi = gpi.phi ();
3658 break;
3662 gimple *first_sunk = NULL;
3663 gimple *last_sunk = NULL;
3664 if (sunk && !(succbb->flags & BB_VISITED))
3665 dgsi = gsi_start (sunk[succbb->index]);
3666 else
3667 dgsi = gsi_after_labels (succbb);
3668 gsi = gsi_last_bb (bb);
3669 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3671 gimple *stmt = gsi_stmt (gsi);
3672 tree lhs;
3673 if (is_gimple_debug (stmt))
3674 continue;
3675 if (gimple_code (stmt) == GIMPLE_LABEL)
3676 break;
3677 lhs = gimple_assign_lhs (stmt);
3678 /* Unfortunately we don't have dominance info updated at this
3679 point, so checking if
3680 dominated_by_p (CDI_DOMINATORS, succbb,
3681 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0)))
3682 would be too costly. Thus, avoid sinking any clobbers that
3683 refer to non-(D) SSA_NAMEs. */
3684 if (TREE_CODE (lhs) == MEM_REF
3685 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
3686 && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0)))
3688 unlink_stmt_vdef (stmt);
3689 gsi_remove (&gsi, true);
3690 release_defs (stmt);
3691 continue;
3694 /* As we do not change stmt order when sinking across a
3695 forwarder edge we can keep virtual operands in place. */
3696 gsi_remove (&gsi, false);
3697 gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);
3698 if (!first_sunk)
3699 first_sunk = stmt;
3700 last_sunk = stmt;
3702 if (sunk && !gimple_seq_empty_p (sunk[bb->index]))
3704 if (!first_sunk)
3705 first_sunk = gsi_stmt (gsi_last (sunk[bb->index]));
3706 last_sunk = gsi_stmt (gsi_start (sunk[bb->index]));
3707 gsi_insert_seq_before_without_update (&dgsi,
3708 sunk[bb->index], GSI_NEW_STMT);
3709 sunk[bb->index] = NULL;
3711 if (first_sunk)
3713 /* Adjust virtual operands if we sunk across a virtual PHI. */
3714 if (vphi)
3716 imm_use_iterator iter;
3717 use_operand_p use_p;
3718 gimple *use_stmt;
3719 tree phi_def = gimple_phi_result (vphi);
3720 FOR_EACH_IMM_USE_STMT (use_stmt, iter, phi_def)
3721 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3722 SET_USE (use_p, gimple_vdef (first_sunk));
3723 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (phi_def))
3725 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (first_sunk)) = 1;
3726 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (phi_def) = 0;
3728 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe),
3729 gimple_vuse (last_sunk));
3730 SET_USE (gimple_vuse_op (last_sunk), phi_def);
3732 /* If there isn't a single predecessor but no virtual PHI node
3733 arrange for virtual operands to be renamed. */
3734 else if (!single_pred_p (succbb)
3735 && TREE_CODE (gimple_vuse (last_sunk)) == SSA_NAME)
3737 mark_virtual_operand_for_renaming (gimple_vuse (last_sunk));
3738 todo |= TODO_update_ssa_only_virtuals;
3742 return todo;
3745 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3746 we have found some duplicate labels and removed some edges. */
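/* Illustrative lowering for an ERT_TRY region with two catches:

     eh_dispatch 1;

   becomes

     _f = __builtin_eh_filter (1);
     switch (_f) <default: afterward, case 1: catch_A, case 2: catch_B>

   while an ERT_ALLOWED_EXCEPTIONS region yields a two-way comparison
   of the filter value against the region's allowed filter. */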
3748 static bool
3749 lower_eh_dispatch (basic_block src, geh_dispatch *stmt)
3751 gimple_stmt_iterator gsi;
3752 int region_nr;
3753 eh_region r;
3754 tree filter, fn;
3755 gimple *x;
3756 bool redirected = false;
3758 region_nr = gimple_eh_dispatch_region (stmt);
3759 r = get_eh_region_from_number (region_nr);
3761 gsi = gsi_last_bb (src);
3763 switch (r->type)
3765 case ERT_TRY:
3767 auto_vec<tree> labels;
3768 tree default_label = NULL;
3769 eh_catch c;
3770 edge_iterator ei;
3771 edge e;
3772 hash_set<tree> seen_values;
3774 /* Collect the labels for a switch. Zero each handler's label
3775 field because we'll no longer have anything keeping these labels
3776 in existence and the optimizer will be free to merge these
3777 blocks at will. */
3778 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3780 tree tp_node, flt_node, lab = c->label;
3781 bool have_label = false;
3783 c->label = NULL;
3784 tp_node = c->type_list;
3785 flt_node = c->filter_list;
3787 if (tp_node == NULL)
3789 default_label = lab;
3790 break;
3794 /* Filter out duplicate labels that arise when this handler
3795 is shadowed by an earlier one. When no labels are
3796 attached to the handler anymore, we remove
3797 the corresponding edge and then we delete unreachable
3798 blocks at the end of this pass. */
3799 if (! seen_values.contains (TREE_VALUE (flt_node)))
3801 tree t = build_case_label (TREE_VALUE (flt_node),
3802 NULL, lab);
3803 labels.safe_push (t);
3804 seen_values.add (TREE_VALUE (flt_node));
3805 have_label = true;
3808 tp_node = TREE_CHAIN (tp_node);
3809 flt_node = TREE_CHAIN (flt_node);
3811 while (tp_node);
3812 if (! have_label)
3814 remove_edge (find_edge (src, label_to_block (cfun, lab)));
3815 redirected = true;
3819 /* Clean up the edge flags. */
3820 FOR_EACH_EDGE (e, ei, src->succs)
3822 if (e->flags & EDGE_FALLTHRU)
3824 /* If there was no catch-all, use the fallthru edge. */
3825 if (default_label == NULL)
3826 default_label = gimple_block_label (e->dest);
3827 e->flags &= ~EDGE_FALLTHRU;
3830 gcc_assert (default_label != NULL);
3832 /* Don't generate a switch if there's only a default case.
3833 This is common in the form of try { A; } catch (...) { B; }. */
3834 if (!labels.exists ())
3836 e = single_succ_edge (src);
3837 e->flags |= EDGE_FALLTHRU;
3839 else
3841 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3842 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3843 region_nr));
3844 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
3845 filter = make_ssa_name (filter, x);
3846 gimple_call_set_lhs (x, filter);
3847 gimple_set_location (x, gimple_location (stmt));
3848 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3850 /* Turn the default label into a default case. */
3851 default_label = build_case_label (NULL, NULL, default_label);
3852 sort_case_labels (labels);
3854 x = gimple_build_switch (filter, default_label, labels);
3855 gimple_set_location (x, gimple_location (stmt));
3856 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3859 break;
3861 case ERT_ALLOWED_EXCEPTIONS:
3863 edge b_e = BRANCH_EDGE (src);
3864 edge f_e = FALLTHRU_EDGE (src);
3866 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3867 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3868 region_nr));
3869 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
3870 filter = make_ssa_name (filter, x);
3871 gimple_call_set_lhs (x, filter);
3872 gimple_set_location (x, gimple_location (stmt));
3873 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3875 r->u.allowed.label = NULL;
3876 x = gimple_build_cond (EQ_EXPR, filter,
3877 build_int_cst (TREE_TYPE (filter),
3878 r->u.allowed.filter),
3879 NULL_TREE, NULL_TREE);
3880 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3882 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3883 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3885 break;
3887 default:
3888 gcc_unreachable ();
3891 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3892 gsi_remove (&gsi, true);
3893 return redirected;
3896 namespace {
3898 const pass_data pass_data_lower_eh_dispatch =
3900 GIMPLE_PASS, /* type */
3901 "ehdisp", /* name */
3902 OPTGROUP_NONE, /* optinfo_flags */
3903 TV_TREE_EH, /* tv_id */
3904 PROP_gimple_lcf, /* properties_required */
3905 0, /* properties_provided */
3906 0, /* properties_destroyed */
3907 0, /* todo_flags_start */
3908 0, /* todo_flags_finish */
3911 class pass_lower_eh_dispatch : public gimple_opt_pass
3913 public:
3914 pass_lower_eh_dispatch (gcc::context *ctxt)
3915 : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt)
3918 /* opt_pass methods: */
3919 virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; }
3920 virtual unsigned int execute (function *);
3922 }; // class pass_lower_eh_dispatch
3924 unsigned
3925 pass_lower_eh_dispatch::execute (function *fun)
3927 basic_block bb;
3928 int flags = 0;
3929 bool redirected = false;
3930 bool any_resx_to_process = false;
3932 assign_filter_values ();
3934 FOR_EACH_BB_FN (bb, fun)
3936 gimple *last = last_stmt (bb);
3937 if (last == NULL)
3938 continue;
3939 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
3941 redirected |= lower_eh_dispatch (bb,
3942 as_a <geh_dispatch *> (last));
3943 flags |= TODO_update_ssa_only_virtuals;
3945 else if (gimple_code (last) == GIMPLE_RESX)
3947 if (stmt_can_throw_external (fun, last))
3948 optimize_clobbers (bb);
3949 else if (!any_resx_to_process)
3950 sink_clobbers (bb, NULL, &any_resx_to_process);
3952 bb->flags &= ~BB_VISITED;
3954 if (redirected)
3956 free_dominance_info (CDI_DOMINATORS);
3957 delete_unreachable_blocks ();
3960 if (any_resx_to_process)
3962 /* Make sure to catch all secondary sinking opportunities by processing
3963 blocks in RPO order and after all CFG modifications from lowering
3964 and unreachable block removal. */
3965 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun));
3966 int rpo_n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, false);
3967 gimple_seq *sunk = XCNEWVEC (gimple_seq, last_basic_block_for_fn (fun));
3968 for (int i = 0; i < rpo_n; ++i)
3970 bb = BASIC_BLOCK_FOR_FN (fun, rpo[i]);
3971 gimple *last = last_stmt (bb);
3972 if (last
3973 && gimple_code (last) == GIMPLE_RESX
3974 && !stmt_can_throw_external (fun, last))
3975 flags |= sink_clobbers (bb, sunk);
3976 /* If there were any clobbers sunk into this BB, insert them now. */
3977 if (!gimple_seq_empty_p (sunk[bb->index]))
3979 gimple_stmt_iterator gsi = gsi_after_labels (bb);
3980 gsi_insert_seq_before (&gsi, sunk[bb->index], GSI_NEW_STMT);
3981 sunk[bb->index] = NULL;
3983 bb->flags |= BB_VISITED;
3985 free (rpo);
3986 free (sunk);
3989 return flags;
3992 } // anon namespace
3994 gimple_opt_pass *
3995 make_pass_lower_eh_dispatch (gcc::context *ctxt)
3997 return new pass_lower_eh_dispatch (ctxt);
4000 /* Walk statements, see what regions and, optionally, landing pads
4001 are really referenced.
4003 Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
4004 and in LP_REACHABLEP an sbitmap with bits set for reachable landing pads.
4006 Passing NULL for LP_REACHABLEP is valid; in this case only reachable
4007 regions are marked.
4009 The caller is responsible for freeing the returned sbitmaps. */
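/* A minimal usage sketch (mirroring remove_unreachable_handlers below):

     sbitmap r_reachable, lp_reachable;
     mark_reachable_handlers (&r_reachable, &lp_reachable);
     if (bitmap_bit_p (r_reachable, region->index))
       ... the region is referenced ...
     sbitmap_free (r_reachable);
     sbitmap_free (lp_reachable);

   Callers that only care about regions pass NULL for LP_REACHABLEP and
   free just the one sbitmap.  */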
4011 static void
4012 mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
4014 sbitmap r_reachable, lp_reachable;
4015 basic_block bb;
4016 bool mark_landing_pads = (lp_reachablep != NULL);
4017 gcc_checking_assert (r_reachablep != NULL);
4019 r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
4020 bitmap_clear (r_reachable);
4021 *r_reachablep = r_reachable;
4023 if (mark_landing_pads)
4025 lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
4026 bitmap_clear (lp_reachable);
4027 *lp_reachablep = lp_reachable;
4029 else
4030 lp_reachable = NULL;
4032 FOR_EACH_BB_FN (bb, cfun)
4034 gimple_stmt_iterator gsi;
4036 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4038 gimple *stmt = gsi_stmt (gsi);
4040 if (mark_landing_pads)
4042 int lp_nr = lookup_stmt_eh_lp (stmt);
4044 /* Negative LP numbers are MUST_NOT_THROW regions which
4045 are not considered BB enders. */
4046 if (lp_nr < 0)
4047 bitmap_set_bit (r_reachable, -lp_nr);
4049 /* Positive LP numbers are real landing pads, and BB enders. */
4050 else if (lp_nr > 0)
4052 gcc_assert (gsi_one_before_end_p (gsi));
4053 eh_region region = get_eh_region_from_lp_number (lp_nr);
4054 bitmap_set_bit (r_reachable, region->index);
4055 bitmap_set_bit (lp_reachable, lp_nr);
4059 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
4060 switch (gimple_code (stmt))
4062 case GIMPLE_RESX:
4063 bitmap_set_bit (r_reachable,
4064 gimple_resx_region (as_a <gresx *> (stmt)));
4065 break;
4066 case GIMPLE_EH_DISPATCH:
4067 bitmap_set_bit (r_reachable,
4068 gimple_eh_dispatch_region (
4069 as_a <geh_dispatch *> (stmt)));
4070 break;
4071 case GIMPLE_CALL:
4072 if (gimple_call_builtin_p (stmt, BUILT_IN_EH_COPY_VALUES))
4073 for (int i = 0; i < 2; ++i)
4075 tree rt = gimple_call_arg (stmt, i);
4076 HOST_WIDE_INT ri = tree_to_shwi (rt);
4078 gcc_assert (ri == (int)ri);
4079 bitmap_set_bit (r_reachable, ri);
4081 break;
4082 default:
4083 break;
4089 /* Remove unreachable handlers and unreachable landing pads. */
4091 static void
4092 remove_unreachable_handlers (void)
4094 sbitmap r_reachable, lp_reachable;
4095 eh_region region;
4096 eh_landing_pad lp;
4097 unsigned i;
4099 mark_reachable_handlers (&r_reachable, &lp_reachable);
4101 if (dump_file)
4103 fprintf (dump_file, "Before removal of unreachable regions:\n");
4104 dump_eh_tree (dump_file, cfun);
4105 fprintf (dump_file, "Reachable regions: ");
4106 dump_bitmap_file (dump_file, r_reachable);
4107 fprintf (dump_file, "Reachable landing pads: ");
4108 dump_bitmap_file (dump_file, lp_reachable);
4111 if (dump_file)
4113 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
4114 if (region && !bitmap_bit_p (r_reachable, region->index))
4115 fprintf (dump_file,
4116 "Removing unreachable region %d\n",
4117 region->index);
4120 remove_unreachable_eh_regions (r_reachable);
4122 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
4123 if (lp && !bitmap_bit_p (lp_reachable, lp->index))
4125 if (dump_file)
4126 fprintf (dump_file,
4127 "Removing unreachable landing pad %d\n",
4128 lp->index);
4129 remove_eh_landing_pad (lp);
4132 if (dump_file)
4134 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
4135 dump_eh_tree (dump_file, cfun);
4136 fprintf (dump_file, "\n\n");
4139 sbitmap_free (r_reachable);
4140 sbitmap_free (lp_reachable);
4142 if (flag_checking)
4143 verify_eh_tree (cfun);
4146 /* Remove unreachable handlers if any landing pads have been removed after
4147 the last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
4149 void
4150 maybe_remove_unreachable_handlers (void)
4152 eh_landing_pad lp;
4153 unsigned i;
4155 if (cfun->eh == NULL)
4156 return;
4158 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
4159 if (lp
4160 && (lp->post_landing_pad == NULL_TREE
4161 || label_to_block (cfun, lp->post_landing_pad) == NULL))
4163 remove_unreachable_handlers ();
4164 return;
4168 /* Remove regions that do not have landing pads. This assumes
4169 that remove_unreachable_handlers has already been run, and
4170 that we've just manipulated the landing pads since then.
4172 Preserve regions with landing pads and regions that prevent
4173 exceptions from propagating further, even if these regions
4174 are not reachable. */
4176 static void
4177 remove_unreachable_handlers_no_lp (void)
4179 eh_region region;
4180 sbitmap r_reachable;
4181 unsigned i;
4183 mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);
4185 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
4187 if (! region)
4188 continue;
4190 if (region->landing_pads != NULL
4191 || region->type == ERT_MUST_NOT_THROW)
4192 bitmap_set_bit (r_reachable, region->index);
4194 if (dump_file
4195 && !bitmap_bit_p (r_reachable, region->index))
4196 fprintf (dump_file,
4197 "Removing unreachable region %d\n",
4198 region->index);
4201 remove_unreachable_eh_regions (r_reachable);
4203 sbitmap_free (r_reachable);
4206 /* Undo critical edge splitting on an EH landing pad. Earlier, we
4207 optimistically split all sorts of edges, including EH edges. The
4208 optimization passes in between may not have needed them; if not,
4209 we should undo the split.
4211 Recognize this case by having one EH edge incoming to the BB and
4212 one normal edge outgoing; BB should be empty apart from the
4213 post_landing_pad label.
4215 Note that this is slightly different from the empty handler case
4216 handled by cleanup_empty_eh, in that the handler proper may still
4217 contain real code, but the landing pad has been separated from the
4218 handler. As such, cleanup_empty_eh relies on this transformation
4219 having been done first. */
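/* Illustrative shape of the transformation (a sketch, not from the
   original comment):

     throw-site --EH--> [BB: post_landing_pad label only] --> handler

   becomes

     throw-site --EH--> handler

   with the forwarder BB going unreachable once its lone EH edge has
   been redirected.  */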
4221 static bool
4222 unsplit_eh (eh_landing_pad lp)
4224 basic_block bb = label_to_block (cfun, lp->post_landing_pad);
4225 gimple_stmt_iterator gsi;
4226 edge e_in, e_out;
4228 /* Quickly check the edge counts on BB for singularity. */
4229 if (!single_pred_p (bb) || !single_succ_p (bb))
4230 return false;
4231 e_in = single_pred_edge (bb);
4232 e_out = single_succ_edge (bb);
4234 /* Input edge must be EH and output edge must be normal. */
4235 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
4236 return false;
4238 /* The block must be empty except for the labels and debug insns. */
4239 gsi = gsi_after_labels (bb);
4240 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4241 gsi_next_nondebug (&gsi);
4242 if (!gsi_end_p (gsi))
4243 return false;
4245 /* The destination block must not already have a landing pad
4246 for a different region. */
4247 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4249 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
4250 tree lab;
4251 int lp_nr;
4253 if (!label_stmt)
4254 break;
4255 lab = gimple_label_label (label_stmt);
4256 lp_nr = EH_LANDING_PAD_NR (lab);
4257 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4258 return false;
4261 /* The new destination block must not already be a destination of
4262 the source block, lest we merge fallthru and eh edges and get
4263 all sorts of confusion. */
4264 if (find_edge (e_in->src, e_out->dest))
4265 return false;
4267 /* ??? We can get degenerate phis due to cfg cleanups. I would have
4268 thought this should have been cleaned up by a phicprop pass, but
4269 that doesn't appear to handle virtuals. Propagate by hand. */
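      /* For instance (hypothetical SSA names): the degenerate virtual PHI
	   .MEM_2 = PHI <.MEM_1(e_in)>
	 is resolved by rewriting every use of .MEM_2 to .MEM_1 and then
	 removing the PHI node, exactly as the loop below does.  */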
4270 if (!gimple_seq_empty_p (phi_nodes (bb)))
4272 for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
4274 gimple *use_stmt;
4275 gphi *phi = gpi.phi ();
4276 tree lhs = gimple_phi_result (phi);
4277 tree rhs = gimple_phi_arg_def (phi, 0);
4278 use_operand_p use_p;
4279 imm_use_iterator iter;
4281 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
4283 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
4284 SET_USE (use_p, rhs);
4287 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4288 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
4290 remove_phi_node (&gpi, true);
4294 if (dump_file && (dump_flags & TDF_DETAILS))
4295 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
4296 lp->index, e_out->dest->index);
4298 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
4299 a successor edge, humor it. But do the real CFG change with the
4300 predecessor of E_OUT in order to preserve the ordering of arguments
4301 to the PHI nodes in E_OUT->DEST. */
4302 redirect_eh_edge_1 (e_in, e_out->dest, false);
4303 redirect_edge_pred (e_out, e_in->src);
4304 e_out->flags = e_in->flags;
4305 e_out->probability = e_in->probability;
4306 remove_edge (e_in);
4308 return true;
4311 /* Examine each landing pad block and see if it matches unsplit_eh. */
4313 static bool
4314 unsplit_all_eh (void)
4316 bool changed = false;
4317 eh_landing_pad lp;
4318 int i;
4320 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
4321 if (lp)
4322 changed |= unsplit_eh (lp);
4324 return changed;
4327 /* Wrapper around unsplit_all_eh that makes it usable everywhere. */
4329 void
4330 unsplit_eh_edges (void)
4332 bool changed;
4334 /* unsplit_all_eh can die looking up unreachable landing pads. */
4335 maybe_remove_unreachable_handlers ();
4337 changed = unsplit_all_eh ();
4339 /* If EH edges have been unsplit, delete unreachable forwarder blocks. */
4340 if (changed)
4342 free_dominance_info (CDI_DOMINATORS);
4343 free_dominance_info (CDI_POST_DOMINATORS);
4344 delete_unreachable_blocks ();
4348 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
4349 to OLD_BB to NEW_BB; return true on success, false on failure.
4351 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
4352 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
4353 Virtual PHIs may be deleted and marked for renaming. */
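/* Illustrative example (hypothetical SSA names): if NEW_BB contains
     x_4 = PHI <x_2(old_bb_out), ...>
   and OLD_BB defines that argument via
     x_2 = PHI <x_0(eh1), x_1(eh2)>
   then the edge_var_map set up below records x_4 <- x_0 for eh1 and
   x_4 <- x_1 for eh2; flush_pending_stmts later materializes these as
   PHI arguments once the EH edges have been redirected to NEW_BB.  */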
4355 static bool
4356 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
4357 edge old_bb_out, bool change_region)
4359 gphi_iterator ngsi, ogsi;
4360 edge_iterator ei;
4361 edge e;
4362 bitmap ophi_handled;
4364 /* The destination block must not be a regular successor for any
4365 of the preds of the landing pad. Thus, avoid turning
4366 <..>
4367 | \ EH
4368 | <..>
4370 <..>
4371 into
4372 <..>
4373 | | EH
4374 <..>
4375 which CFG verification would choke on. See PR45172 and PR51089. */
4376 if (!single_pred_p (new_bb))
4377 FOR_EACH_EDGE (e, ei, old_bb->preds)
4378 if (find_edge (e->src, new_bb))
4379 return false;
4381 FOR_EACH_EDGE (e, ei, old_bb->preds)
4382 redirect_edge_var_map_clear (e);
4384 ophi_handled = BITMAP_ALLOC (NULL);
4386 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
4387 for the edges we're going to move. */
4388 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
4390 gphi *ophi, *nphi = ngsi.phi ();
4391 tree nresult, nop;
4393 nresult = gimple_phi_result (nphi);
4394 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
4396 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
4397 the source ssa_name. */
4398 ophi = NULL;
4399 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4401 ophi = ogsi.phi ();
4402 if (gimple_phi_result (ophi) == nop)
4403 break;
4404 ophi = NULL;
4407 /* If we did find the corresponding PHI, copy those inputs. */
4408 if (ophi)
4410 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
4411 if (!has_single_use (nop))
4413 imm_use_iterator imm_iter;
4414 use_operand_p use_p;
4416 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
4418 if (!gimple_debug_bind_p (USE_STMT (use_p))
4419 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
4420 || gimple_bb (USE_STMT (use_p)) != new_bb))
4421 goto fail;
4424 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
4425 FOR_EACH_EDGE (e, ei, old_bb->preds)
4427 location_t oloc;
4428 tree oop;
4430 if ((e->flags & EDGE_EH) == 0)
4431 continue;
4432 oop = gimple_phi_arg_def (ophi, e->dest_idx);
4433 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
4434 redirect_edge_var_map_add (e, nresult, oop, oloc);
4437 /* If we didn't find the PHI, then whether it's a real variable or
4438 a VOP, we know from the fact that OLD_BB is tree_empty_eh_handler_p
4439 that the variable is unchanged from input to the block, and we can
4440 simply re-use the input to NEW_BB from the OLD_BB_OUT edge. */
4441 else
4443 location_t nloc
4444 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
4445 FOR_EACH_EDGE (e, ei, old_bb->preds)
4446 redirect_edge_var_map_add (e, nresult, nop, nloc);
4450 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
4451 we don't know what values from the other edges into NEW_BB to use. */
4452 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4454 gphi *ophi = ogsi.phi ();
4455 tree oresult = gimple_phi_result (ophi);
4456 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
4457 goto fail;
4460 /* Finally, move the edges and update the PHIs. */
4461 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
4462 if (e->flags & EDGE_EH)
4464 /* ??? CFG manipulation routines do not try to update loop
4465 form on edge redirection. Do so manually here for now. */
4466 /* Redirecting a loop entry or latch edge will either create
4467 a multiple-entry loop or rotate the loop. If the loops merge
4468 we may have created a loop with multiple latches.
4469 None of this is easily fixed, so cancel the affected loop
4470 and mark the other loop as possibly having multiple latches. */
4471 if (e->dest == e->dest->loop_father->header)
4473 mark_loop_for_removal (e->dest->loop_father);
4474 new_bb->loop_father->latch = NULL;
4475 loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
4477 redirect_eh_edge_1 (e, new_bb, change_region);
4478 redirect_edge_succ (e, new_bb);
4479 flush_pending_stmts (e);
4481 else
4482 ei_next (&ei);
4484 BITMAP_FREE (ophi_handled);
4485 return true;
4487 fail:
4488 FOR_EACH_EDGE (e, ei, old_bb->preds)
4489 redirect_edge_var_map_clear (e);
4490 BITMAP_FREE (ophi_handled);
4491 return false;
4494 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
4495 old region to NEW_REGION at BB. */
4497 static void
4498 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
4499 eh_landing_pad lp, eh_region new_region)
4501 gimple_stmt_iterator gsi;
4502 eh_landing_pad *pp;
4504 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
4505 continue;
4506 *pp = lp->next_lp;
4508 lp->region = new_region;
4509 lp->next_lp = new_region->landing_pads;
4510 new_region->landing_pads = lp;
4512 /* Delete the RESX that was matched within the empty handler block. */
4513 gsi = gsi_last_bb (bb);
4514 unlink_stmt_vdef (gsi_stmt (gsi));
4515 gsi_remove (&gsi, true);
4517 /* Clean up E_OUT for the fallthru. */
4518 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
4519 e_out->probability = profile_probability::always ();
4522 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
4523 unsplitting than unsplit_eh was prepared to handle, e.g. when
4524 multiple incoming edges and phis are involved. */
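/* For example (sketch): a post-landing-pad block with two incoming EH
   edges fails unsplit_eh's single_pred_p test, but can still be removed
   here if cleanup_empty_eh_merge_phis manages to rewire both edges and
   their PHI arguments into E_OUT->dest.  */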
4526 static bool
4527 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
4529 gimple_stmt_iterator gsi;
4530 tree lab;
4532 /* We really ought not have totally lost everything following
4533 a landing pad label. Given that BB is empty, there had better
4534 be a successor. */
4535 gcc_assert (e_out != NULL);
4537 /* The destination block must not already have a landing pad
4538 for a different region. */
4539 lab = NULL;
4540 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4542 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
4543 int lp_nr;
4545 if (!stmt)
4546 break;
4547 lab = gimple_label_label (stmt);
4548 lp_nr = EH_LANDING_PAD_NR (lab);
4549 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4550 return false;
4553 /* Attempt to move the PHIs into the successor block. */
4554 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
4556 if (dump_file && (dump_flags & TDF_DETAILS))
4557 fprintf (dump_file,
4558 "Unsplit EH landing pad %d to block %i "
4559 "(via cleanup_empty_eh).\n",
4560 lp->index, e_out->dest->index);
4561 return true;
4564 return false;
4567 /* Return true if edge E_FIRST is part of an empty infinite loop
4568 or leads to such a loop through a series of single successor
4569 empty bbs. */
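/* E.g. (illustrative): a self edge with E_FIRST->dest == E_FIRST->src
   returns true immediately; a cycle of label-only blocks B1 -> B2 -> B1
   is caught via the AUX markers; any block containing a real (non-debug)
   statement ends the walk and the function returns false.  */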
4571 static bool
4572 infinite_empty_loop_p (edge e_first)
4574 bool inf_loop = false;
4575 edge e;
4577 if (e_first->dest == e_first->src)
4578 return true;
4580 e_first->src->aux = (void *) 1;
4581 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4583 gimple_stmt_iterator gsi;
4584 if (e->dest->aux)
4586 inf_loop = true;
4587 break;
4589 e->dest->aux = (void *) 1;
4590 gsi = gsi_after_labels (e->dest);
4591 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4592 gsi_next_nondebug (&gsi);
4593 if (!gsi_end_p (gsi))
4594 break;
4596 e_first->src->aux = NULL;
4597 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4598 e->dest->aux = NULL;
4600 return inf_loop;
4603 /* Examine the block associated with LP to determine if it's an empty
4604 handler for its EH region. If so, attempt to redirect EH edges to
4605 an outer region. Return true if the CFG was updated in any way. This
4606 is similar to jump forwarding, just across EH edges. */
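/* A source-level situation where this typically applies (an assumed
   example, not taken from the original comment):

     struct S { ~S (); };	// destructor later inlined away
     void f () { S s; may_throw (); }

   Once ~S () is inlined to nothing, the cleanup region's handler is
   just a RESX, and the EH edge from may_throw () can be forwarded to
   the outer region, much like jump forwarding on normal edges.  */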
4608 static bool
4609 cleanup_empty_eh (eh_landing_pad lp)
4611 basic_block bb = label_to_block (cfun, lp->post_landing_pad);
4612 gimple_stmt_iterator gsi;
4613 gimple *resx;
4614 eh_region new_region;
4615 edge_iterator ei;
4616 edge e, e_out;
4617 bool has_non_eh_pred;
4618 bool ret = false;
4619 int new_lp_nr;
4621 /* There can be zero or one edges out of BB. This is the quickest test. */
4622 switch (EDGE_COUNT (bb->succs))
4624 case 0:
4625 e_out = NULL;
4626 break;
4627 case 1:
4628 e_out = single_succ_edge (bb);
4629 break;
4630 default:
4631 return false;
4634 gsi = gsi_last_nondebug_bb (bb);
4635 resx = gsi_stmt (gsi);
4636 if (resx && is_gimple_resx (resx))
4638 if (stmt_can_throw_external (cfun, resx))
4639 optimize_clobbers (bb);
4640 else if (sink_clobbers (bb))
4641 ret = true;
4644 gsi = gsi_after_labels (bb);
4646 /* Make sure to skip debug statements. */
4647 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4648 gsi_next_nondebug (&gsi);
4650 /* If the block is totally empty, look for more unsplitting cases. */
4651 if (gsi_end_p (gsi))
4653 /* For the degenerate case of an infinite loop, bail out.
4654 If BB has no successors and is totally empty, which can happen e.g.
4655 because of an incorrect noreturn attribute, bail out too. */
4656 if (e_out == NULL
4657 || infinite_empty_loop_p (e_out))
4658 return ret;
4660 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
4663 /* The block should consist only of a single RESX statement, modulo a
4664 preceding call to __builtin_stack_restore if there is no outgoing
4665 edge, since the call can be eliminated in this case. */
4666 resx = gsi_stmt (gsi);
4667 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4669 gsi_next_nondebug (&gsi);
4670 resx = gsi_stmt (gsi);
4672 if (!is_gimple_resx (resx))
4673 return ret;
4674 gcc_assert (gsi_one_nondebug_before_end_p (gsi));
4676 /* Determine if there are non-EH edges, or resx edges into the handler. */
4677 has_non_eh_pred = false;
4678 FOR_EACH_EDGE (e, ei, bb->preds)
4679 if (!(e->flags & EDGE_EH))
4680 has_non_eh_pred = true;
4682 /* Find the handler that's outer of the empty handler by looking at
4683 where the RESX instruction was vectored. */
4684 new_lp_nr = lookup_stmt_eh_lp (resx);
4685 new_region = get_eh_region_from_lp_number (new_lp_nr);
4687 /* If there's no destination region within the current function,
4688 redirection is trivial via removing the throwing statements from
4689 the EH region, removing the EH edges, and allowing the block
4690 to go unreachable. */
4691 if (new_region == NULL)
4693 gcc_assert (e_out == NULL);
4694 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4695 if (e->flags & EDGE_EH)
4697 gimple *stmt = last_stmt (e->src);
4698 remove_stmt_from_eh_lp (stmt);
4699 remove_edge (e);
4701 else
4702 ei_next (&ei);
4703 goto succeed;
4706 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4707 to handle the abort and allow the blocks to go unreachable. */
4708 if (new_region->type == ERT_MUST_NOT_THROW)
4710 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4711 if (e->flags & EDGE_EH)
4713 gimple *stmt = last_stmt (e->src);
4714 remove_stmt_from_eh_lp (stmt);
4715 add_stmt_to_eh_lp (stmt, new_lp_nr);
4716 remove_edge (e);
4718 else
4719 ei_next (&ei);
4720 goto succeed;
4723 /* Try to redirect the EH edges and merge the PHIs into the destination
4724 landing pad block. If the merge succeeds, we'll already have redirected
4725 all the EH edges. The handler itself will go unreachable if there were
4726 no normal edges. */
4727 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
4728 goto succeed;
4730 /* Finally, if all input edges are EH edges, then we can (potentially)
4731 reduce the number of transfers from the runtime by moving the landing
4732 pad from the original region to the new region. This is a win when
4733 we remove the last CLEANUP region along a particular exception
4734 propagation path. Since nothing changes except for the region with
4735 which the landing pad is associated, the PHI nodes do not need to be
4736 adjusted at all. */
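  /* Sketch of the win (illustrative): with an empty CLEANUP region R2
     nested in R1, moving R2's landing pad to R1 lets the unwinder
     transfer control once, directly to R1's pad, instead of landing in
     R2 only to RESX onward into R1.  */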
4737 if (!has_non_eh_pred)
4739 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4740 if (dump_file && (dump_flags & TDF_DETAILS))
4741 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4742 lp->index, new_region->index);
4744 /* ??? The CFG didn't change, but we may have rendered the
4745 old EH region unreachable. Trigger a cleanup there. */
4746 return true;
4749 return ret;
4751 succeed:
4752 if (dump_file && (dump_flags & TDF_DETAILS))
4753 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4754 remove_eh_landing_pad (lp);
4755 return true;
4758 /* Do a post-order traversal of the EH region tree. Examine each
4759 post_landing_pad block and see if we can eliminate it as empty. */
4761 static bool
4762 cleanup_all_empty_eh (void)
4764 bool changed = false;
4765 eh_landing_pad lp;
4766 int i;
4768 /* The post-order traversal may lead to quadratic behavior in the redirection
4769 of incoming EH edges from inner LPs, so first try to walk the region
4770 tree from inner to outer LPs in order to eliminate these edges. */
4771 for (i = vec_safe_length (cfun->eh->lp_array) - 1; i >= 1; --i)
4773 lp = (*cfun->eh->lp_array)[i];
4774 if (lp)
4775 changed |= cleanup_empty_eh (lp);
4778 /* Now do the post-order traversal to eliminate outer empty LPs. */
4779 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
4780 if (lp)
4781 changed |= cleanup_empty_eh (lp);
4783 return changed;
4786 /* Perform cleanups and lowering of exception handling:
4787 1) cleanup regions whose handlers do nothing are optimized out
4788 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
4789 3) info about regions containing instructions, and about regions
4790 reachable via local EH edges, is collected
4791 4) the EH tree is pruned of regions that are no longer necessary.
4793 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4794 Unify those that have the same failure decl and locus.
4797 static unsigned int
4798 execute_cleanup_eh_1 (void)
4800 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4801 looking up unreachable landing pads. */
4802 remove_unreachable_handlers ();
4804 /* Watch out for the region tree vanishing because all of it was unreachable. */
4805 if (cfun->eh->region_tree)
4807 bool changed = false;
4809 if (optimize)
4810 changed |= unsplit_all_eh ();
4811 changed |= cleanup_all_empty_eh ();
4813 if (changed)
4815 free_dominance_info (CDI_DOMINATORS);
4816 free_dominance_info (CDI_POST_DOMINATORS);
4818 /* We delayed all basic block deletion, as we may have performed
4819 cleanups on EH edges while non-EH edges were still present. */
4820 delete_unreachable_blocks ();
4822 /* We manipulated the landing pads. Remove any region that no
4823 longer has a landing pad. */
4824 remove_unreachable_handlers_no_lp ();
4826 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4830 return 0;
4833 namespace {
4835 const pass_data pass_data_cleanup_eh =
4837 GIMPLE_PASS, /* type */
4838 "ehcleanup", /* name */
4839 OPTGROUP_NONE, /* optinfo_flags */
4840 TV_TREE_EH, /* tv_id */
4841 PROP_gimple_lcf, /* properties_required */
4842 0, /* properties_provided */
4843 0, /* properties_destroyed */
4844 0, /* todo_flags_start */
4845 0, /* todo_flags_finish */
4848 class pass_cleanup_eh : public gimple_opt_pass
4850 public:
4851 pass_cleanup_eh (gcc::context *ctxt)
4852 : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
4855 /* opt_pass methods: */
4856 opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }
4857 virtual bool gate (function *fun)
4859 return fun->eh != NULL && fun->eh->region_tree != NULL;
4862 virtual unsigned int execute (function *);
4864 }; // class pass_cleanup_eh
4866 unsigned int
4867 pass_cleanup_eh::execute (function *fun)
4869 int ret = execute_cleanup_eh_1 ();
4871 /* If the function no longer needs an EH personality routine,
4872 clear it. This exposes cross-language inlining opportunities
4873 and avoids references to a never defined personality routine. */
4874 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4875 && function_needs_eh_personality (fun) != eh_personality_lang)
4876 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4878 return ret;
4881 } // anon namespace
4883 gimple_opt_pass *
4884 make_pass_cleanup_eh (gcc::context *ctxt)
4886 return new pass_cleanup_eh (ctxt);
4889 /* Disable warnings about missing quoting in GCC diagnostics for
4890 the verification errors. Their format strings don't follow GCC
4891 diagnostic conventions but are only used for debugging. */
4892 #if __GNUC__ >= 10
4893 # pragma GCC diagnostic push
4894 # pragma GCC diagnostic ignored "-Wformat-diag"
4895 #endif
4897 /* Verify that the BB containing STMT as its last statement has precisely
4898 the edge that make_eh_edges would create. */
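/* Restated as a checklist (covering the checks below): a statement with
   no landing pad must have no EH successor edge; a statement with a
   landing pad must be able to throw, must have exactly one EH successor
   edge, and that edge must lead to the pad's post_landing_pad block.  */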
4900 DEBUG_FUNCTION bool
4901 verify_eh_edges (gimple *stmt)
4903 basic_block bb = gimple_bb (stmt);
4904 eh_landing_pad lp = NULL;
4905 int lp_nr;
4906 edge_iterator ei;
4907 edge e, eh_edge;
4909 lp_nr = lookup_stmt_eh_lp (stmt);
4910 if (lp_nr > 0)
4911 lp = get_eh_landing_pad_from_number (lp_nr);
4913 eh_edge = NULL;
4914 FOR_EACH_EDGE (e, ei, bb->succs)
4916 if (e->flags & EDGE_EH)
4918 if (eh_edge)
4920 error ("BB %i has multiple EH edges", bb->index);
4921 return true;
4923 else
4924 eh_edge = e;
4928 if (lp == NULL)
4930 if (eh_edge)
4932 error ("BB %i cannot throw but has an EH edge", bb->index);
4933 return true;
4935 return false;
4938 if (!stmt_could_throw_p (cfun, stmt))
4940 error ("BB %i last statement has incorrectly set lp", bb->index);
4941 return true;
4944 if (eh_edge == NULL)
4946 error ("BB %i is missing an EH edge", bb->index);
4947 return true;
4950 if (eh_edge->dest != label_to_block (cfun, lp->post_landing_pad))
4952 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4953 return true;
4956 return false;
4959 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
4961 DEBUG_FUNCTION bool
4962 verify_eh_dispatch_edge (geh_dispatch *stmt)
4964 eh_region r;
4965 eh_catch c;
4966 basic_block src, dst;
4967 bool want_fallthru = true;
4968 edge_iterator ei;
4969 edge e, fall_edge;
4971 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4972 src = gimple_bb (stmt);
4974 FOR_EACH_EDGE (e, ei, src->succs)
4975 gcc_assert (e->aux == NULL);
4977 switch (r->type)
4979 case ERT_TRY:
4980 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4982 dst = label_to_block (cfun, c->label);
4983 e = find_edge (src, dst);
4984 if (e == NULL)
4986 error ("BB %i is missing an edge", src->index);
4987 return true;
4989 e->aux = (void *)e;
4991 /* A catch-all handler doesn't have a fallthru. */
4992 if (c->type_list == NULL)
4994 want_fallthru = false;
4995 break;
4998 break;
5000 case ERT_ALLOWED_EXCEPTIONS:
5001 dst = label_to_block (cfun, r->u.allowed.label);
5002 e = find_edge (src, dst);
5003 if (e == NULL)
5005 error ("BB %i is missing an edge", src->index);
5006 return true;
5008 e->aux = (void *)e;
5009 break;
5011 default:
5012 gcc_unreachable ();
5015 fall_edge = NULL;
5016 FOR_EACH_EDGE (e, ei, src->succs)
5018 if (e->flags & EDGE_FALLTHRU)
5020 if (fall_edge != NULL)
5022 error ("BB %i too many fallthru edges", src->index);
5023 return true;
5025 fall_edge = e;
5027 else if (e->aux)
5028 e->aux = NULL;
5029 else
5031 error ("BB %i has incorrect edge", src->index);
5032 return true;
5035 if ((fall_edge != NULL) ^ want_fallthru)
5037 error ("BB %i has incorrect fallthru edge", src->index);
5038 return true;
5041 return false;
5044 #if __GNUC__ >= 10
5045 # pragma GCC diagnostic pop
5046 #endif