gcc/tree-eh.c
1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "cfghooks.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "cgraph.h"
31 #include "diagnostic-core.h"
32 #include "fold-const.h"
33 #include "calls.h"
34 #include "except.h"
35 #include "cfganal.h"
36 #include "cfgcleanup.h"
37 #include "tree-eh.h"
38 #include "gimple-iterator.h"
39 #include "tree-cfg.h"
40 #include "tree-into-ssa.h"
41 #include "tree-ssa.h"
42 #include "tree-inline.h"
43 #include "langhooks.h"
44 #include "cfgloop.h"
45 #include "gimple-low.h"
46 #include "asan.h"
 48 /* In some instances a tree and a gimple need to be stored in the same table,
49 i.e. in hash tables. This is a structure to do this. */
50 typedef union {tree *tp; tree t; gimple *g;} treemple;
52 /* Misc functions used in this file. */
54 /* Remember and lookup EH landing pad data for arbitrary statements.
55 Really this means any statement that could_throw_p. We could
56 stuff this information into the stmt_ann data structure, but:
58 (1) We absolutely rely on this information being kept until
59 we get to rtl. Once we're done with lowering here, if we lose
60 the information there's no way to recover it!
62 (2) There are many more statements that *cannot* throw as
63 compared to those that can. We should be saving some amount
64 of space by only allocating memory for those that can throw. */
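/* A sketch of typical use, for illustration: after lowering, a statement
   that can throw is recorded with add_stmt_to_eh_lp (stmt, lp->index);
   later passes query it with lookup_stmt_eh_lp, which returns a positive
   landing pad index, a negative MUST_NOT_THROW region index, or 0 when
   no EH information is recorded for the statement.  */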
66 /* Add statement T in function IFUN to landing pad NUM. */
68 static void
69 add_stmt_to_eh_lp_fn (struct function *ifun, gimple *t, int num)
71 gcc_assert (num != 0);
73 if (!get_eh_throw_stmt_table (ifun))
74 set_eh_throw_stmt_table (ifun, hash_map<gimple *, int>::create_ggc (31));
76 gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num));
79 /* Add statement T in the current function (cfun) to EH landing pad NUM. */
81 void
82 add_stmt_to_eh_lp (gimple *t, int num)
84 add_stmt_to_eh_lp_fn (cfun, t, num);
87 /* Add statement T to the single EH landing pad in REGION. */
89 static void
90 record_stmt_eh_region (eh_region region, gimple *t)
92 if (region == NULL)
93 return;
94 if (region->type == ERT_MUST_NOT_THROW)
95 add_stmt_to_eh_lp_fn (cfun, t, -region->index);
96 else
98 eh_landing_pad lp = region->landing_pads;
99 if (lp == NULL)
100 lp = gen_eh_landing_pad (region);
101 else
102 gcc_assert (lp->next_lp == NULL);
103 add_stmt_to_eh_lp_fn (cfun, t, lp->index);
108 /* Remove statement T in function IFUN from its EH landing pad. */
110 bool
111 remove_stmt_from_eh_lp_fn (struct function *ifun, gimple *t)
113 if (!get_eh_throw_stmt_table (ifun))
114 return false;
116 if (!get_eh_throw_stmt_table (ifun)->get (t))
117 return false;
119 get_eh_throw_stmt_table (ifun)->remove (t);
120 return true;
124 /* Remove statement T in the current function (cfun) from its
125 EH landing pad. */
127 bool
128 remove_stmt_from_eh_lp (gimple *t)
130 return remove_stmt_from_eh_lp_fn (cfun, t);
133 /* Determine if statement T is inside an EH region in function IFUN.
134 Positive numbers indicate a landing pad index; negative numbers
135 indicate a MUST_NOT_THROW region index; zero indicates that the
136 statement is not recorded in the region table. */
139 lookup_stmt_eh_lp_fn (struct function *ifun, gimple *t)
141 if (ifun->eh->throw_stmt_table == NULL)
142 return 0;
144 int *lp_nr = ifun->eh->throw_stmt_table->get (t);
145 return lp_nr ? *lp_nr : 0;
148 /* Likewise, but always use the current function. */
151 lookup_stmt_eh_lp (gimple *t)
153 /* We can get called from initialized data when -fnon-call-exceptions
154 is on; prevent crash. */
155 if (!cfun)
156 return 0;
157 return lookup_stmt_eh_lp_fn (cfun, t);
160 /* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
161 nodes and LABEL_DECL nodes. We will use this during the second phase to
162 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
164 struct finally_tree_node
166 /* When storing a GIMPLE_TRY, we have to record a gimple. However
167 when deciding whether a GOTO to a certain LABEL_DECL (which is a
 168 tree) leaves the TRY block, it's necessary to record a tree in
169 this field. Thus a treemple is used. */
170 treemple child;
171 gtry *parent;
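/* A sketch of how this is used: for

     try { L1: S1; goto L2; } finally { F; }

   the first pass records L1 with the enclosing GIMPLE_TRY_FINALLY as its
   parent.  outside_finally_tree then finds no entry chaining L2 to that
   GIMPLE_TRY_FINALLY, so the goto to L2 is known to leave the try body
   and must be routed through the finally block.  */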
174 /* Hashtable helpers. */
176 struct finally_tree_hasher : free_ptr_hash <finally_tree_node>
178 static inline hashval_t hash (const finally_tree_node *);
179 static inline bool equal (const finally_tree_node *,
180 const finally_tree_node *);
183 inline hashval_t
184 finally_tree_hasher::hash (const finally_tree_node *v)
186 return (intptr_t)v->child.t >> 4;
189 inline bool
190 finally_tree_hasher::equal (const finally_tree_node *v,
191 const finally_tree_node *c)
193 return v->child.t == c->child.t;
196 /* Note that this table is *not* marked GTY. It is short-lived. */
197 static hash_table<finally_tree_hasher> *finally_tree;
199 static void
200 record_in_finally_tree (treemple child, gtry *parent)
202 struct finally_tree_node *n;
203 finally_tree_node **slot;
205 n = XNEW (struct finally_tree_node);
206 n->child = child;
207 n->parent = parent;
209 slot = finally_tree->find_slot (n, INSERT);
210 gcc_assert (!*slot);
211 *slot = n;
214 static void
215 collect_finally_tree (gimple *stmt, gtry *region);
217 /* Go through the gimple sequence. Works with collect_finally_tree to
218 record all GIMPLE_LABEL and GIMPLE_TRY statements. */
220 static void
221 collect_finally_tree_1 (gimple_seq seq, gtry *region)
223 gimple_stmt_iterator gsi;
225 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
226 collect_finally_tree (gsi_stmt (gsi), region);
229 static void
230 collect_finally_tree (gimple *stmt, gtry *region)
232 treemple temp;
234 switch (gimple_code (stmt))
236 case GIMPLE_LABEL:
237 temp.t = gimple_label_label (as_a <glabel *> (stmt));
238 record_in_finally_tree (temp, region);
239 break;
241 case GIMPLE_TRY:
242 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
244 temp.g = stmt;
245 record_in_finally_tree (temp, region);
246 collect_finally_tree_1 (gimple_try_eval (stmt),
247 as_a <gtry *> (stmt));
248 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
250 else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
252 collect_finally_tree_1 (gimple_try_eval (stmt), region);
253 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
255 break;
257 case GIMPLE_CATCH:
258 collect_finally_tree_1 (gimple_catch_handler (
259 as_a <gcatch *> (stmt)),
260 region);
261 break;
263 case GIMPLE_EH_FILTER:
264 collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
265 break;
267 case GIMPLE_EH_ELSE:
269 geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
270 collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
271 collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
273 break;
275 default:
276 /* A type, a decl, or some kind of statement that we're not
277 interested in. Don't walk them. */
278 break;
283 /* Use the finally tree to determine if a jump from START to TARGET
284 would leave the try_finally node that START lives in. */
286 static bool
287 outside_finally_tree (treemple start, gimple *target)
289 struct finally_tree_node n, *p;
293 n.child = start;
294 p = finally_tree->find (&n);
295 if (!p)
296 return true;
297 start.g = p->parent;
299 while (start.g != target);
301 return false;
304 /* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY
305 nodes into a set of gotos, magic labels, and eh regions.
306 The eh region creation is straight-forward, but frobbing all the gotos
307 and such into shape isn't. */
309 /* The sequence into which we record all EH stuff. This will be
310 placed at the end of the function when we're all done. */
311 static gimple_seq eh_seq;
313 /* Record whether an EH region contains something that can throw,
314 indexed by EH region number. */
315 static bitmap eh_region_may_contain_throw_map;
317 /* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
318 statements that are seen to escape this GIMPLE_TRY_FINALLY node.
319 The idea is to record a gimple statement for everything except for
320 the conditionals, which get their labels recorded. Since labels are
321 of type 'tree', we need this node to store both gimple and tree
322 objects. REPL_STMT is the sequence used to replace the goto/return
323 statement. CONT_STMT is used to store the statement that allows
324 the return/goto to jump to the original destination. */
326 struct goto_queue_node
328 treemple stmt;
329 location_t location;
330 gimple_seq repl_stmt;
331 gimple *cont_stmt;
332 int index;
333 /* This is used when index >= 0 to indicate that stmt is a label (as
334 opposed to a goto stmt). */
335 int is_label;
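/* A sketch of a typical entry, e.g. for a "return x;" escaping a
   try/finally: STMT points at the GIMPLE_RETURN, REPL_STMT is built up as
   "goto <finally_label>;" (preceded by any finally_tmp assignment), and
   CONT_STMT is the statement emitted after the finally code to reach the
   original destination -- here the GIMPLE_RETURN itself.  INDEX is -1 for
   returns and an index into DEST_ARRAY for gotos.  */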
338 /* State of the world while lowering. */
340 struct leh_state
342 /* What's "current" while constructing the eh region tree. These
343 correspond to variables of the same name in cfun->eh, which we
344 don't have easy access to. */
345 eh_region cur_region;
347 /* What's "current" for the purposes of __builtin_eh_pointer. For
348 a CATCH, this is the associated TRY. For an EH_FILTER, this is
349 the associated ALLOWED_EXCEPTIONS, etc. */
350 eh_region ehp_region;
352 /* Processing of TRY_FINALLY requires a bit more state. This is
353 split out into a separate structure so that we don't have to
354 copy so much when processing other nodes. */
355 struct leh_tf_state *tf;
358 struct leh_tf_state
360 /* Pointer to the GIMPLE_TRY_FINALLY node under discussion. The
361 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain
362 this so that outside_finally_tree can reliably reference the tree used
363 in the collect_finally_tree data structures. */
364 gtry *try_finally_expr;
365 gtry *top_p;
 367 /* While lowering, top_p is usually expanded into multiple statements,
 368 so we need the following field to store them. */
369 gimple_seq top_p_seq;
371 /* The state outside this try_finally node. */
372 struct leh_state *outer;
374 /* The exception region created for it. */
375 eh_region region;
377 /* The goto queue. */
378 struct goto_queue_node *goto_queue;
379 size_t goto_queue_size;
380 size_t goto_queue_active;
382 /* Pointer map to help in searching goto_queue when it is large. */
383 hash_map<gimple *, goto_queue_node *> *goto_queue_map;
385 /* The set of unique labels seen as entries in the goto queue. */
386 vec<tree> dest_array;
388 /* A label to be added at the end of the completed transformed
389 sequence. It will be set if may_fallthru was true *at one time*,
390 though subsequent transformations may have cleared that flag. */
391 tree fallthru_label;
393 /* True if it is possible to fall out the bottom of the try block.
394 Cleared if the fallthru is converted to a goto. */
395 bool may_fallthru;
397 /* True if any entry in goto_queue is a GIMPLE_RETURN. */
398 bool may_return;
400 /* True if the finally block can receive an exception edge.
401 Cleared if the exception case is handled by code duplication. */
402 bool may_throw;
405 static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *);
407 /* Search for STMT in the goto queue. Return the replacement,
408 or null if the statement isn't in the queue. */
410 #define LARGE_GOTO_QUEUE 20
412 static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);
414 static gimple_seq
415 find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
417 unsigned int i;
419 if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
421 for (i = 0; i < tf->goto_queue_active; i++)
422 if ( tf->goto_queue[i].stmt.g == stmt.g)
423 return tf->goto_queue[i].repl_stmt;
424 return NULL;
427 /* If we have a large number of entries in the goto_queue, create a
428 pointer map and use that for searching. */
430 if (!tf->goto_queue_map)
432 tf->goto_queue_map = new hash_map<gimple *, goto_queue_node *>;
433 for (i = 0; i < tf->goto_queue_active; i++)
435 bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g,
436 &tf->goto_queue[i]);
437 gcc_assert (!existed);
441 goto_queue_node **slot = tf->goto_queue_map->get (stmt.g);
442 if (slot != NULL)
443 return ((*slot)->repl_stmt);
445 return NULL;
448 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
449 lowered GIMPLE_COND. If, by chance, the replacement is a simple goto,
450 then we can just splat it in, otherwise we add the new stmts immediately
451 after the GIMPLE_COND and redirect. */
453 static void
454 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
455 gimple_stmt_iterator *gsi)
457 tree label;
458 gimple_seq new_seq;
459 treemple temp;
460 location_t loc = gimple_location (gsi_stmt (*gsi));
462 temp.tp = tp;
463 new_seq = find_goto_replacement (tf, temp);
464 if (!new_seq)
465 return;
467 if (gimple_seq_singleton_p (new_seq)
468 && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
470 *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
471 return;
474 label = create_artificial_label (loc);
475 /* Set the new label for the GIMPLE_COND */
476 *tp = label;
478 gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
479 gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
 482 /* The real work of replace_goto_queue. Returns with GSI updated to
483 point to the next statement. */
485 static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);
487 static void
488 replace_goto_queue_1 (gimple *stmt, struct leh_tf_state *tf,
489 gimple_stmt_iterator *gsi)
491 gimple_seq seq;
492 treemple temp;
493 temp.g = NULL;
495 switch (gimple_code (stmt))
497 case GIMPLE_GOTO:
498 case GIMPLE_RETURN:
499 temp.g = stmt;
500 seq = find_goto_replacement (tf, temp);
501 if (seq)
503 gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
504 gsi_remove (gsi, false);
505 return;
507 break;
509 case GIMPLE_COND:
510 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
511 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
512 break;
514 case GIMPLE_TRY:
515 replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
516 replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
517 break;
518 case GIMPLE_CATCH:
519 replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
520 as_a <gcatch *> (stmt)),
521 tf);
522 break;
523 case GIMPLE_EH_FILTER:
524 replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
525 break;
526 case GIMPLE_EH_ELSE:
528 geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
529 replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
530 tf);
531 replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
532 tf);
534 break;
536 default:
537 /* These won't have gotos in them. */
538 break;
541 gsi_next (gsi);
544 /* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */
546 static void
547 replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
549 gimple_stmt_iterator gsi = gsi_start (*seq);
551 while (!gsi_end_p (gsi))
552 replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
555 /* Replace all goto queue members. */
557 static void
558 replace_goto_queue (struct leh_tf_state *tf)
560 if (tf->goto_queue_active == 0)
561 return;
562 replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
563 replace_goto_queue_stmt_list (&eh_seq, tf);
566 /* Add a new record to the goto queue contained in TF. NEW_STMT is the
567 data to be added, IS_LABEL indicates whether NEW_STMT is a label or
568 a gimple return. */
570 static void
571 record_in_goto_queue (struct leh_tf_state *tf,
572 treemple new_stmt,
573 int index,
574 bool is_label,
575 location_t location)
577 size_t active, size;
578 struct goto_queue_node *q;
580 gcc_assert (!tf->goto_queue_map);
582 active = tf->goto_queue_active;
583 size = tf->goto_queue_size;
584 if (active >= size)
586 size = (size ? size * 2 : 32);
587 tf->goto_queue_size = size;
588 tf->goto_queue
589 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
592 q = &tf->goto_queue[active];
593 tf->goto_queue_active = active + 1;
595 memset (q, 0, sizeof (*q));
596 q->stmt = new_stmt;
597 q->index = index;
598 q->location = location;
599 q->is_label = is_label;
602 /* Record the LABEL label in the goto queue contained in TF.
603 TF is not null. */
605 static void
606 record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
607 location_t location)
609 int index;
610 treemple temp, new_stmt;
612 if (!label)
613 return;
615 /* Computed and non-local gotos do not get processed. Given
616 their nature we can neither tell whether we've escaped the
617 finally block nor redirect them if we knew. */
618 if (TREE_CODE (label) != LABEL_DECL)
619 return;
621 /* No need to record gotos that don't leave the try block. */
622 temp.t = label;
623 if (!outside_finally_tree (temp, tf->try_finally_expr))
624 return;
626 if (! tf->dest_array.exists ())
628 tf->dest_array.create (10);
629 tf->dest_array.quick_push (label);
630 index = 0;
632 else
634 int n = tf->dest_array.length ();
635 for (index = 0; index < n; ++index)
636 if (tf->dest_array[index] == label)
637 break;
638 if (index == n)
639 tf->dest_array.safe_push (label);
642 /* In the case of a GOTO we want to record the destination label,
 643 since with a GIMPLE_COND we have easy access to the then/else
644 labels. */
645 new_stmt = stmt;
646 record_in_goto_queue (tf, new_stmt, index, true, location);
649 /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
650 node, and if so record that fact in the goto queue associated with that
651 try_finally node. */
653 static void
654 maybe_record_in_goto_queue (struct leh_state *state, gimple *stmt)
656 struct leh_tf_state *tf = state->tf;
657 treemple new_stmt;
659 if (!tf)
660 return;
662 switch (gimple_code (stmt))
664 case GIMPLE_COND:
666 gcond *cond_stmt = as_a <gcond *> (stmt);
667 new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
668 record_in_goto_queue_label (tf, new_stmt,
669 gimple_cond_true_label (cond_stmt),
670 EXPR_LOCATION (*new_stmt.tp));
671 new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
672 record_in_goto_queue_label (tf, new_stmt,
673 gimple_cond_false_label (cond_stmt),
674 EXPR_LOCATION (*new_stmt.tp));
676 break;
677 case GIMPLE_GOTO:
678 new_stmt.g = stmt;
679 record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
680 gimple_location (stmt));
681 break;
683 case GIMPLE_RETURN:
684 tf->may_return = true;
685 new_stmt.g = stmt;
686 record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
687 break;
689 default:
690 gcc_unreachable ();
695 #if CHECKING_P
696 /* We do not process GIMPLE_SWITCHes for now. As long as the original source
697 was in fact structured, and we've not yet done jump threading, then none
698 of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */
700 static void
701 verify_norecord_switch_expr (struct leh_state *state,
702 gswitch *switch_expr)
704 struct leh_tf_state *tf = state->tf;
705 size_t i, n;
707 if (!tf)
708 return;
710 n = gimple_switch_num_labels (switch_expr);
712 for (i = 0; i < n; ++i)
714 treemple temp;
715 tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
716 temp.t = lab;
717 gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
720 #else
721 #define verify_norecord_switch_expr(state, switch_expr)
722 #endif
724 /* Redirect a RETURN_EXPR pointed to by Q to FINLAB. If MOD is
725 non-null, insert it before the new branch. */
727 static void
728 do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
730 gimple *x;
732 /* In the case of a return, the queue node must be a gimple statement. */
733 gcc_assert (!q->is_label);
735 /* Note that the return value may have already been computed, e.g.,
737 int x;
738 int foo (void)
740 x = 0;
741 try {
742 return x;
743 } finally {
744 x++;
748 should return 0, not 1. We don't have to do anything to make
 749 this happen because the return value has been placed in the
750 RESULT_DECL already. */
752 q->cont_stmt = q->stmt.g;
754 if (mod)
755 gimple_seq_add_seq (&q->repl_stmt, mod);
757 x = gimple_build_goto (finlab);
758 gimple_set_location (x, q->location);
759 gimple_seq_add_stmt (&q->repl_stmt, x);
762 /* Similar, but easier, for GIMPLE_GOTO. */
764 static void
765 do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
766 struct leh_tf_state *tf)
768 ggoto *x;
770 gcc_assert (q->is_label);
772 q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);
774 if (mod)
775 gimple_seq_add_seq (&q->repl_stmt, mod);
777 x = gimple_build_goto (finlab);
778 gimple_set_location (x, q->location);
779 gimple_seq_add_stmt (&q->repl_stmt, x);
782 /* Emit a standard landing pad sequence into SEQ for REGION. */
784 static void
785 emit_post_landing_pad (gimple_seq *seq, eh_region region)
787 eh_landing_pad lp = region->landing_pads;
788 glabel *x;
790 if (lp == NULL)
791 lp = gen_eh_landing_pad (region);
793 lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
794 EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;
796 x = gimple_build_label (lp->post_landing_pad);
797 gimple_seq_add_stmt (seq, x);
800 /* Emit a RESX statement into SEQ for REGION. */
802 static void
803 emit_resx (gimple_seq *seq, eh_region region)
805 gresx *x = gimple_build_resx (region->index);
806 gimple_seq_add_stmt (seq, x);
807 if (region->outer)
808 record_stmt_eh_region (region->outer, x);
811 /* Emit an EH_DISPATCH statement into SEQ for REGION. */
813 static void
814 emit_eh_dispatch (gimple_seq *seq, eh_region region)
816 geh_dispatch *x = gimple_build_eh_dispatch (region->index);
817 gimple_seq_add_stmt (seq, x);
820 /* Note that the current EH region may contain a throw, or a
821 call to a function which itself may contain a throw. */
823 static void
824 note_eh_region_may_contain_throw (eh_region region)
826 while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
828 if (region->type == ERT_MUST_NOT_THROW)
829 break;
830 region = region->outer;
831 if (region == NULL)
832 break;
836 /* Check if REGION has been marked as containing a throw. If REGION is
837 NULL, this predicate is false. */
839 static inline bool
840 eh_region_may_contain_throw (eh_region r)
842 return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
845 /* We want to transform
846 try { body; } catch { stuff; }
848 normal_sequence:
849 body;
850 over:
851 eh_sequence:
852 landing_pad:
853 stuff;
854 goto over;
856 TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad
857 should be placed before the second operand, or NULL. OVER is
858 an existing label that should be put at the exit, or NULL. */
860 static gimple_seq
861 frob_into_branch_around (gtry *tp, eh_region region, tree over)
863 gimple *x;
864 gimple_seq cleanup, result;
865 location_t loc = gimple_location (tp);
867 cleanup = gimple_try_cleanup (tp);
868 result = gimple_try_eval (tp);
870 if (region)
871 emit_post_landing_pad (&eh_seq, region);
873 if (gimple_seq_may_fallthru (cleanup))
875 if (!over)
876 over = create_artificial_label (loc);
877 x = gimple_build_goto (over);
878 gimple_set_location (x, loc);
879 gimple_seq_add_stmt (&cleanup, x);
881 gimple_seq_add_seq (&eh_seq, cleanup);
883 if (over)
885 x = gimple_build_label (over);
886 gimple_seq_add_stmt (&result, x);
888 return result;
 891 /* A subroutine of lower_try_finally. Duplicate the statement sequence SEQ.
892 Make sure to record all new labels found. */
894 static gimple_seq
895 lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
896 location_t loc)
898 gtry *region = NULL;
899 gimple_seq new_seq;
900 gimple_stmt_iterator gsi;
902 new_seq = copy_gimple_seq_and_replace_locals (seq);
904 for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
906 gimple *stmt = gsi_stmt (gsi);
907 /* We duplicate __builtin_stack_restore at -O0 in the hope of eliminating
908 it on the EH paths. When it is not eliminated, make it transparent in
909 the debug info. */
910 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
911 gimple_set_location (stmt, UNKNOWN_LOCATION);
912 else if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
914 tree block = gimple_block (stmt);
915 gimple_set_location (stmt, loc);
916 gimple_set_block (stmt, block);
920 if (outer_state->tf)
921 region = outer_state->tf->try_finally_expr;
922 collect_finally_tree_1 (new_seq, region);
924 return new_seq;
927 /* A subroutine of lower_try_finally. Create a fallthru label for
928 the given try_finally state. The only tricky bit here is that
929 we have to make sure to record the label in our outer context. */
931 static tree
932 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
934 tree label = tf->fallthru_label;
935 treemple temp;
937 if (!label)
939 label = create_artificial_label (gimple_location (tf->try_finally_expr));
940 tf->fallthru_label = label;
941 if (tf->outer->tf)
943 temp.t = label;
944 record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
947 return label;
 950 /* A subroutine of lower_try_finally. If FINALLY consists of a
951 GIMPLE_EH_ELSE node, return it. */
953 static inline geh_else *
954 get_eh_else (gimple_seq finally)
956 gimple *x = gimple_seq_first_stmt (finally);
957 if (gimple_code (x) == GIMPLE_EH_ELSE)
959 gcc_assert (gimple_seq_singleton_p (finally));
960 return as_a <geh_else *> (x);
962 return NULL;
965 /* A subroutine of lower_try_finally. If the eh_protect_cleanup_actions
966 langhook returns non-null, then the language requires that the exception
967 path out of a try_finally be treated specially. To wit: the code within
968 the finally block may not itself throw an exception. We have two choices
969 here. First we can duplicate the finally block and wrap it in a
970 must_not_throw region. Second, we can generate code like
972 try {
973 finally_block;
974 } catch {
975 if (fintmp == eh_edge)
976 protect_cleanup_actions;
979 where "fintmp" is the temporary used in the switch statement generation
980 alternative considered below. For the nonce, we always choose the first
981 option.
983 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
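/* Under the first option, the exception path is roughly lowered to

     landing_pad:
       try { FINALLY-copy; } catch { MUST_NOT_THROW; }
       resx;

   i.e. the duplicated finally block is wrapped in a must-not-throw region
   whose failure action is the decl returned by eh_protect_cleanup_actions
   (typically terminate for C++).  */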
985 static void
986 honor_protect_cleanup_actions (struct leh_state *outer_state,
987 struct leh_state *this_state,
988 struct leh_tf_state *tf)
990 gimple_seq finally = gimple_try_cleanup (tf->top_p);
992 /* EH_ELSE doesn't come from user code; only compiler generated stuff.
993 It does need to be handled here, so as to separate the (different)
994 EH path from the normal path. But we should not attempt to wrap
995 it with a must-not-throw node (which indeed gets in the way). */
996 if (geh_else *eh_else = get_eh_else (finally))
998 gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
999 finally = gimple_eh_else_e_body (eh_else);
1001 /* Let the ELSE see the exception that's being processed. */
1002 eh_region save_ehp = this_state->ehp_region;
1003 this_state->ehp_region = this_state->cur_region;
1004 lower_eh_constructs_1 (this_state, &finally);
1005 this_state->ehp_region = save_ehp;
1007 else
1009 /* First check for nothing to do. */
1010 if (lang_hooks.eh_protect_cleanup_actions == NULL)
1011 return;
1012 tree actions = lang_hooks.eh_protect_cleanup_actions ();
1013 if (actions == NULL)
1014 return;
1016 if (this_state)
1017 finally = lower_try_finally_dup_block (finally, outer_state,
1018 gimple_location (tf->try_finally_expr));
1020 /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
1021 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
1022 to be in an enclosing scope, but needs to be implemented at this level
1023 to avoid a nesting violation (see wrap_temporary_cleanups in
1024 cp/decl.c). Since it's logically at an outer level, we should call
1025 terminate before we get to it, so strip it away before adding the
1026 MUST_NOT_THROW filter. */
1027 gimple_stmt_iterator gsi = gsi_start (finally);
1028 gimple *x = gsi_stmt (gsi);
1029 if (gimple_code (x) == GIMPLE_TRY
1030 && gimple_try_kind (x) == GIMPLE_TRY_CATCH
1031 && gimple_try_catch_is_cleanup (x))
1033 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
1034 gsi_remove (&gsi, false);
1037 /* Wrap the block with protect_cleanup_actions as the action. */
1038 geh_mnt *eh_mnt = gimple_build_eh_must_not_throw (actions);
1039 gtry *try_stmt = gimple_build_try (finally,
1040 gimple_seq_alloc_with_stmt (eh_mnt),
1041 GIMPLE_TRY_CATCH);
1042 finally = lower_eh_must_not_throw (outer_state, try_stmt);
1045 /* Drop all of this into the exception sequence. */
1046 emit_post_landing_pad (&eh_seq, tf->region);
1047 gimple_seq_add_seq (&eh_seq, finally);
1048 if (gimple_seq_may_fallthru (finally))
1049 emit_resx (&eh_seq, tf->region);
1051 /* Having now been handled, EH isn't to be considered with
1052 the rest of the outgoing edges. */
1053 tf->may_throw = false;
1056 /* A subroutine of lower_try_finally. We have determined that there is
1057 no fallthru edge out of the finally block. This means that there is
1058 no outgoing edge corresponding to any incoming edge. Restructure the
1059 try_finally node for this special case. */
1061 static void
1062 lower_try_finally_nofallthru (struct leh_state *state,
1063 struct leh_tf_state *tf)
1065 tree lab;
1066 gimple *x;
1067 geh_else *eh_else;
1068 gimple_seq finally;
1069 struct goto_queue_node *q, *qe;
1071 lab = create_artificial_label (gimple_location (tf->try_finally_expr));
1073 /* We expect that tf->top_p is a GIMPLE_TRY. */
1074 finally = gimple_try_cleanup (tf->top_p);
1075 tf->top_p_seq = gimple_try_eval (tf->top_p);
1077 x = gimple_build_label (lab);
1078 gimple_seq_add_stmt (&tf->top_p_seq, x);
1080 q = tf->goto_queue;
1081 qe = q + tf->goto_queue_active;
1082 for (; q < qe; ++q)
1083 if (q->index < 0)
1084 do_return_redirection (q, lab, NULL);
1085 else
1086 do_goto_redirection (q, lab, NULL, tf);
1088 replace_goto_queue (tf);
1090 /* Emit the finally block into the stream. Lower EH_ELSE at this time. */
1091 eh_else = get_eh_else (finally);
1092 if (eh_else)
1094 finally = gimple_eh_else_n_body (eh_else);
1095 lower_eh_constructs_1 (state, &finally);
1096 gimple_seq_add_seq (&tf->top_p_seq, finally);
1098 if (tf->may_throw)
1100 finally = gimple_eh_else_e_body (eh_else);
1101 lower_eh_constructs_1 (state, &finally);
1103 emit_post_landing_pad (&eh_seq, tf->region);
1104 gimple_seq_add_seq (&eh_seq, finally);
1107 else
1109 lower_eh_constructs_1 (state, &finally);
1110 gimple_seq_add_seq (&tf->top_p_seq, finally);
1112 if (tf->may_throw)
1114 emit_post_landing_pad (&eh_seq, tf->region);
1116 x = gimple_build_goto (lab);
1117 gimple_set_location (x, gimple_location (tf->try_finally_expr));
1118 gimple_seq_add_stmt (&eh_seq, x);
1123 /* A subroutine of lower_try_finally. We have determined that there is
1124 exactly one destination of the finally block. Restructure the
1125 try_finally node for this special case. */
1127 static void
1128 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
1130 struct goto_queue_node *q, *qe;
1131 geh_else *eh_else;
1132 glabel *label_stmt;
1133 gimple *x;
1134 gimple_seq finally;
1135 gimple_stmt_iterator gsi;
1136 tree finally_label;
1137 location_t loc = gimple_location (tf->try_finally_expr);
1139 finally = gimple_try_cleanup (tf->top_p);
1140 tf->top_p_seq = gimple_try_eval (tf->top_p);
1142 /* Since there's only one destination, and the destination edge can only
1143 either be EH or non-EH, that implies that all of our incoming edges
1144 are of the same type. Therefore we can lower EH_ELSE immediately. */
1145 eh_else = get_eh_else (finally);
1146 if (eh_else)
1148 if (tf->may_throw)
1149 finally = gimple_eh_else_e_body (eh_else);
1150 else
1151 finally = gimple_eh_else_n_body (eh_else);
1154 lower_eh_constructs_1 (state, &finally);
1156 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1158 gimple *stmt = gsi_stmt (gsi);
1159 if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
1161 tree block = gimple_block (stmt);
1162 gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
1163 gimple_set_block (stmt, block);
1167 if (tf->may_throw)
1169 /* Only reachable via the exception edge. Add the given label to
1170 the head of the FINALLY block. Append a RESX at the end. */
1171 emit_post_landing_pad (&eh_seq, tf->region);
1172 gimple_seq_add_seq (&eh_seq, finally);
1173 emit_resx (&eh_seq, tf->region);
1174 return;
1177 if (tf->may_fallthru)
1179 /* Only reachable via the fallthru edge. Do nothing but let
1180 the two blocks run together; we'll fall out the bottom. */
1181 gimple_seq_add_seq (&tf->top_p_seq, finally);
1182 return;
1185 finally_label = create_artificial_label (loc);
1186 label_stmt = gimple_build_label (finally_label);
1187 gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);
1189 gimple_seq_add_seq (&tf->top_p_seq, finally);
1191 q = tf->goto_queue;
1192 qe = q + tf->goto_queue_active;
1194 if (tf->may_return)
1196 /* Reachable by return expressions only. Redirect them. */
1197 for (; q < qe; ++q)
1198 do_return_redirection (q, finally_label, NULL);
1199 replace_goto_queue (tf);
1201 else
1203 /* Reachable by goto expressions only. Redirect them. */
1204 for (; q < qe; ++q)
1205 do_goto_redirection (q, finally_label, NULL, tf);
1206 replace_goto_queue (tf);
1208 if (tf->dest_array[0] == tf->fallthru_label)
1210 /* Reachable by goto to fallthru label only. Redirect it
1211 to the new label (already created, sadly), and do not
1212 emit the final branch out, or the fallthru label. */
1213 tf->fallthru_label = NULL;
1214 return;
1218 /* Place the original return/goto to the original destination
1219 immediately after the finally block. */
1220 x = tf->goto_queue[0].cont_stmt;
1221 gimple_seq_add_stmt (&tf->top_p_seq, x);
1222 maybe_record_in_goto_queue (state, x);
1225 /* A subroutine of lower_try_finally. There are multiple edges incoming
1226 and outgoing from the finally block. Implement this by duplicating the
1227 finally block for every destination. */
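/* For example, roughly: with goto destinations L1 and L2 plus a fallthru,
   the lowered form becomes

     <try body, with escaping edges redirected to labN>
     FINALLY; goto <fallthru_label>;
     lab1: FINALLY; goto L1;
     lab2: FINALLY; goto L2;

   plus, when the region may throw, one more copy on the EH path followed
   by a RESX.  */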
1229 static void
1230 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1232 gimple_seq finally;
1233 gimple_seq new_stmt;
1234 gimple_seq seq;
1235 gimple *x;
1236 geh_else *eh_else;
1237 tree tmp;
1238 location_t tf_loc = gimple_location (tf->try_finally_expr);
1240 finally = gimple_try_cleanup (tf->top_p);
1242 /* Notice EH_ELSE, and simplify some of the remaining code
1243 by considering FINALLY to be the normal return path only. */
1244 eh_else = get_eh_else (finally);
1245 if (eh_else)
1246 finally = gimple_eh_else_n_body (eh_else);
1248 tf->top_p_seq = gimple_try_eval (tf->top_p);
1249 new_stmt = NULL;
1251 if (tf->may_fallthru)
1253 seq = lower_try_finally_dup_block (finally, state, tf_loc);
1254 lower_eh_constructs_1 (state, &seq);
1255 gimple_seq_add_seq (&new_stmt, seq);
1257 tmp = lower_try_finally_fallthru_label (tf);
1258 x = gimple_build_goto (tmp);
1259 gimple_set_location (x, tf_loc);
1260 gimple_seq_add_stmt (&new_stmt, x);
1263 if (tf->may_throw)
1265 /* We don't need to copy the EH path of EH_ELSE,
1266 since it is only emitted once. */
1267 if (eh_else)
1268 seq = gimple_eh_else_e_body (eh_else);
1269 else
1270 seq = lower_try_finally_dup_block (finally, state, tf_loc);
1271 lower_eh_constructs_1 (state, &seq);
1273 emit_post_landing_pad (&eh_seq, tf->region);
1274 gimple_seq_add_seq (&eh_seq, seq);
1275 emit_resx (&eh_seq, tf->region);
1278 if (tf->goto_queue)
1280 struct goto_queue_node *q, *qe;
1281 int return_index, index;
1282 struct labels_s
1284 struct goto_queue_node *q;
1285 tree label;
1286 } *labels;
1288 return_index = tf->dest_array.length ();
1289 labels = XCNEWVEC (struct labels_s, return_index + 1);
1291 q = tf->goto_queue;
1292 qe = q + tf->goto_queue_active;
1293 for (; q < qe; q++)
1295 index = q->index < 0 ? return_index : q->index;
1297 if (!labels[index].q)
1298 labels[index].q = q;
1301 for (index = 0; index < return_index + 1; index++)
1303 tree lab;
1305 q = labels[index].q;
1306 if (! q)
1307 continue;
1309 lab = labels[index].label
1310 = create_artificial_label (tf_loc);
1312 if (index == return_index)
1313 do_return_redirection (q, lab, NULL);
1314 else
1315 do_goto_redirection (q, lab, NULL, tf);
1317 x = gimple_build_label (lab);
1318 gimple_seq_add_stmt (&new_stmt, x);
1320 seq = lower_try_finally_dup_block (finally, state, q->location);
1321 lower_eh_constructs_1 (state, &seq);
1322 gimple_seq_add_seq (&new_stmt, seq);
1324 gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
1325 maybe_record_in_goto_queue (state, q->cont_stmt);
1328 for (q = tf->goto_queue; q < qe; q++)
1330 tree lab;
1332 index = q->index < 0 ? return_index : q->index;
1334 if (labels[index].q == q)
1335 continue;
1337 lab = labels[index].label;
1339 if (index == return_index)
1340 do_return_redirection (q, lab, NULL);
1341 else
1342 do_goto_redirection (q, lab, NULL, tf);
1345 replace_goto_queue (tf);
1346 free (labels);
 1349 /* Link the new stmts only after running replace_goto_queue, so that
 1350 we do not process the same goto stmts twice. */
1351 gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
1354 /* A subroutine of lower_try_finally. There are multiple edges incoming
1355 and outgoing from the finally block. Implement this by instrumenting
1356 each incoming edge and creating a switch statement at the end of the
1357 finally block that branches to the appropriate destination. */
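/* For example, roughly: with goto destinations L1 and L2 plus a fallthru,
   the lowered form becomes

     <try body, with each escaping edge rewritten as
        finally_tmp = <case#>; goto finally_label;>
     finally_label:
     FINALLY;
     switch (finally_tmp)
       {
       case 0: goto L1;
       case 1: goto L2;
       case 2: goto <fallthru_label>;
       }

   so only a single copy of the finally block is emitted; one of the cases
   is then turned into the required default label.  */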
1359 static void
1360 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1362 struct goto_queue_node *q, *qe;
1363 tree finally_tmp, finally_label;
1364 int return_index, eh_index, fallthru_index;
1365 int nlabels, ndests, j, last_case_index;
1366 tree last_case;
1367 auto_vec<tree> case_label_vec;
1368 gimple_seq switch_body = NULL;
1369 gimple *x;
1370 geh_else *eh_else;
1371 tree tmp;
1372 gimple *switch_stmt;
1373 gimple_seq finally;
1374 hash_map<tree, gimple *> *cont_map = NULL;
1375 /* The location of the TRY_FINALLY stmt. */
1376 location_t tf_loc = gimple_location (tf->try_finally_expr);
1377 /* The location of the finally block. */
1378 location_t finally_loc;
1380 finally = gimple_try_cleanup (tf->top_p);
1381 eh_else = get_eh_else (finally);
1383 /* Mash the TRY block to the head of the chain. */
1384 tf->top_p_seq = gimple_try_eval (tf->top_p);
1386 /* The location of the finally is either the last stmt in the finally
1387 block or the location of the TRY_FINALLY itself. */
1388 x = gimple_seq_last_stmt (finally);
1389 finally_loc = x ? gimple_location (x) : tf_loc;
1391 /* Prepare for switch statement generation. */
1392 nlabels = tf->dest_array.length ();
1393 return_index = nlabels;
1394 eh_index = return_index + tf->may_return;
1395 fallthru_index = eh_index + (tf->may_throw && !eh_else);
1396 ndests = fallthru_index + tf->may_fallthru;
1398 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1399 finally_label = create_artificial_label (finally_loc);
1401 /* We use vec::quick_push on case_label_vec throughout this function,
 1402 since we know the size in advance and allocate precisely as much
1403 space as needed. */
1404 case_label_vec.create (ndests);
1405 last_case = NULL;
1406 last_case_index = 0;
1408 /* Begin inserting code for getting to the finally block. Things
 1409 are done in this order to correspond to the sequence in which the code is
1410 laid out. */
1412 if (tf->may_fallthru)
1414 x = gimple_build_assign (finally_tmp,
1415 build_int_cst (integer_type_node,
1416 fallthru_index));
1417 gimple_seq_add_stmt (&tf->top_p_seq, x);
1419 tmp = build_int_cst (integer_type_node, fallthru_index);
1420 last_case = build_case_label (tmp, NULL,
1421 create_artificial_label (tf_loc));
1422 case_label_vec.quick_push (last_case);
1423 last_case_index++;
1425 x = gimple_build_label (CASE_LABEL (last_case));
1426 gimple_seq_add_stmt (&switch_body, x);
1428 tmp = lower_try_finally_fallthru_label (tf);
1429 x = gimple_build_goto (tmp);
1430 gimple_set_location (x, tf_loc);
1431 gimple_seq_add_stmt (&switch_body, x);
1434 /* For EH_ELSE, emit the exception path (plus resx) now, then
1435 subsequently we only need consider the normal path. */
1436 if (eh_else)
1438 if (tf->may_throw)
1440 finally = gimple_eh_else_e_body (eh_else);
1441 lower_eh_constructs_1 (state, &finally);
1443 emit_post_landing_pad (&eh_seq, tf->region);
1444 gimple_seq_add_seq (&eh_seq, finally);
1445 emit_resx (&eh_seq, tf->region);
1448 finally = gimple_eh_else_n_body (eh_else);
1450 else if (tf->may_throw)
1452 emit_post_landing_pad (&eh_seq, tf->region);
1454 x = gimple_build_assign (finally_tmp,
1455 build_int_cst (integer_type_node, eh_index));
1456 gimple_seq_add_stmt (&eh_seq, x);
1458 x = gimple_build_goto (finally_label);
1459 gimple_set_location (x, tf_loc);
1460 gimple_seq_add_stmt (&eh_seq, x);
1462 tmp = build_int_cst (integer_type_node, eh_index);
1463 last_case = build_case_label (tmp, NULL,
1464 create_artificial_label (tf_loc));
1465 case_label_vec.quick_push (last_case);
1466 last_case_index++;
1468 x = gimple_build_label (CASE_LABEL (last_case));
1469 gimple_seq_add_stmt (&eh_seq, x);
1470 emit_resx (&eh_seq, tf->region);
1473 x = gimple_build_label (finally_label);
1474 gimple_seq_add_stmt (&tf->top_p_seq, x);
1476 lower_eh_constructs_1 (state, &finally);
1477 gimple_seq_add_seq (&tf->top_p_seq, finally);
1479 /* Redirect each incoming goto edge. */
1480 q = tf->goto_queue;
1481 qe = q + tf->goto_queue_active;
1482 j = last_case_index + tf->may_return;
1483 /* Prepare the assignments to finally_tmp that are executed upon the
1484 entrance through a particular edge. */
1485 for (; q < qe; ++q)
1487 gimple_seq mod = NULL;
1488 int switch_id;
1489 unsigned int case_index;
1491 if (q->index < 0)
1493 x = gimple_build_assign (finally_tmp,
1494 build_int_cst (integer_type_node,
1495 return_index));
1496 gimple_seq_add_stmt (&mod, x);
1497 do_return_redirection (q, finally_label, mod);
1498 switch_id = return_index;
1500 else
1502 x = gimple_build_assign (finally_tmp,
1503 build_int_cst (integer_type_node, q->index));
1504 gimple_seq_add_stmt (&mod, x);
1505 do_goto_redirection (q, finally_label, mod, tf);
1506 switch_id = q->index;
1509 case_index = j + q->index;
1510 if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
1512 tree case_lab;
1513 tmp = build_int_cst (integer_type_node, switch_id);
1514 case_lab = build_case_label (tmp, NULL,
1515 create_artificial_label (tf_loc));
1516 /* We store the cont_stmt in the pointer map, so that we can recover
1517 it in the loop below. */
1518 if (!cont_map)
1519 cont_map = new hash_map<tree, gimple *>;
1520 cont_map->put (case_lab, q->cont_stmt);
1521 case_label_vec.quick_push (case_lab);
1524 for (j = last_case_index; j < last_case_index + nlabels; j++)
1526 gimple *cont_stmt;
1528 last_case = case_label_vec[j];
1530 gcc_assert (last_case);
1531 gcc_assert (cont_map);
1533 cont_stmt = *cont_map->get (last_case);
1535 x = gimple_build_label (CASE_LABEL (last_case));
1536 gimple_seq_add_stmt (&switch_body, x);
1537 gimple_seq_add_stmt (&switch_body, cont_stmt);
1538 maybe_record_in_goto_queue (state, cont_stmt);
1540 if (cont_map)
1541 delete cont_map;
1543 replace_goto_queue (tf);
1545 /* Make sure that the last case is the default label, as one is required.
1546 Then sort the labels, which is also required in GIMPLE. */
1547 CASE_LOW (last_case) = NULL;
1548 tree tem = case_label_vec.pop ();
1549 gcc_assert (tem == last_case);
1550 sort_case_labels (case_label_vec);
1552 /* Build the switch statement, setting last_case to be the default
1553 label. */
1554 switch_stmt = gimple_build_switch (finally_tmp, last_case,
1555 case_label_vec);
1556 gimple_set_location (switch_stmt, finally_loc);
 1558 /* Link SWITCH_STMT only after running replace_goto_queue, so that
 1559 we do not process the same goto stmts twice. */
1560 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
1561 gimple_seq_add_seq (&tf->top_p_seq, switch_body);
1564 /* Decide whether or not we are going to duplicate the finally block.
1565 There are several considerations.
1567 First, if this is Java, then the finally block contains code
1568 written by the user. It has line numbers associated with it,
1569 so duplicating the block means it's difficult to set a breakpoint.
1570 Since controlling code generation via -g is verboten, we simply
1571 never duplicate code without optimization.
1573 Second, we'd like to prevent egregious code growth. One way to
1574 do this is to estimate the size of the finally block, multiply
1575 that by the number of copies we'd need to make, and compare against
1576 the estimate of the size of the switch machinery we'd have to add. */
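/* As a worked example of the estimate below: a finally block of about 3
   insns with 4 destinations gives f_estimate = (3 + 1) * 4 = 16 versus
   sw_estimate = 10 + 2 * 4 = 18, so when optimizing for size the copies
   still win; with 8 destinations it is 32 versus 26 and the switch form
   is chosen instead.  */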
1578 static bool
1579 decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
1581 int f_estimate, sw_estimate;
1582 geh_else *eh_else;
1584 /* If there's an EH_ELSE involved, the exception path is separate
1585 and really doesn't come into play for this computation. */
1586 eh_else = get_eh_else (finally);
1587 if (eh_else)
1589 ndests -= may_throw;
1590 finally = gimple_eh_else_n_body (eh_else);
1593 if (!optimize)
1595 gimple_stmt_iterator gsi;
1597 if (ndests == 1)
1598 return true;
1600 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1602 /* Duplicate __builtin_stack_restore in the hope of eliminating it
1603 on the EH paths and, consequently, useless cleanups. */
1604 gimple *stmt = gsi_stmt (gsi);
1605 if (!is_gimple_debug (stmt)
1606 && !gimple_clobber_p (stmt)
1607 && !gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
1608 return false;
1610 return true;
1613 /* Finally estimate N times, plus N gotos. */
1614 f_estimate = estimate_num_insns_seq (finally, &eni_size_weights);
1615 f_estimate = (f_estimate + 1) * ndests;
1617 /* Switch statement (cost 10), N variable assignments, N gotos. */
1618 sw_estimate = 10 + 2 * ndests;
1620 /* Optimize for size clearly wants our best guess. */
1621 if (optimize_function_for_size_p (cfun))
1622 return f_estimate < sw_estimate;
1624 /* ??? These numbers are completely made up so far. */
1625 if (optimize > 1)
1626 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1627 else
1628 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1631 /* REG is the enclosing region for a possible cleanup region, or the region
1632 itself. Returns TRUE if such a region would be unreachable.
1634 Cleanup regions within a must-not-throw region aren't actually reachable
1635 even if there are throwing stmts within them, because the personality
1636 routine will call terminate before unwinding. */
1638 static bool
1639 cleanup_is_dead_in (eh_region reg)
1641 while (reg && reg->type == ERT_CLEANUP)
1642 reg = reg->outer;
1643 return (reg && reg->type == ERT_MUST_NOT_THROW);
 1646 /* A subroutine of lower_eh_constructs_1. Lower GIMPLE_TRY_FINALLY nodes
1647 to a sequence of labels and blocks, plus the exception region trees
1648 that record all the magic. This is complicated by the need to
1649 arrange for the FINALLY block to be executed on all exits. */
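/* A sketch of the dispatch below: after lowering the try body,
   ndests = #unique goto labels + may_fallthru + may_return + may_throw.
   ndests == 0 simply drops the finally block; a finally block that cannot
   fall through uses lower_try_finally_nofallthru; ndests == 1 uses
   lower_try_finally_onedest; otherwise decide_copy_try_finally chooses
   between lower_try_finally_copy and lower_try_finally_switch.  */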
1651 static gimple_seq
1652 lower_try_finally (struct leh_state *state, gtry *tp)
1654 struct leh_tf_state this_tf;
1655 struct leh_state this_state;
1656 int ndests;
1657 gimple_seq old_eh_seq;
1659 /* Process the try block. */
1661 memset (&this_tf, 0, sizeof (this_tf));
1662 this_tf.try_finally_expr = tp;
1663 this_tf.top_p = tp;
1664 this_tf.outer = state;
1665 if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
1667 this_tf.region = gen_eh_region_cleanup (state->cur_region);
1668 this_state.cur_region = this_tf.region;
1670 else
1672 this_tf.region = NULL;
1673 this_state.cur_region = state->cur_region;
1676 this_state.ehp_region = state->ehp_region;
1677 this_state.tf = &this_tf;
1679 old_eh_seq = eh_seq;
1680 eh_seq = NULL;
1682 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1684 /* Determine if the try block is escaped through the bottom. */
1685 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1687 /* Determine if any exceptions are possible within the try block. */
1688 if (this_tf.region)
1689 this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
1690 if (this_tf.may_throw)
1691 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1693 /* Determine how many edges (still) reach the finally block. Or rather,
1694 how many destinations are reached by the finally block. Use this to
1695 determine how we process the finally block itself. */
1697 ndests = this_tf.dest_array.length ();
1698 ndests += this_tf.may_fallthru;
1699 ndests += this_tf.may_return;
1700 ndests += this_tf.may_throw;
1702 /* If the FINALLY block is not reachable, dike it out. */
1703 if (ndests == 0)
1705 gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
1706 gimple_try_set_cleanup (tp, NULL);
1708 /* If the finally block doesn't fall through, then any destination
1709 we might try to impose there isn't reached either. There may be
1710 some minor amount of cleanup and redirection still needed. */
1711 else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
1712 lower_try_finally_nofallthru (state, &this_tf);
1714 /* We can easily special-case redirection to a single destination. */
1715 else if (ndests == 1)
1716 lower_try_finally_onedest (state, &this_tf);
1717 else if (decide_copy_try_finally (ndests, this_tf.may_throw,
1718 gimple_try_cleanup (tp)))
1719 lower_try_finally_copy (state, &this_tf);
1720 else
1721 lower_try_finally_switch (state, &this_tf);
1723 /* If someone requested we add a label at the end of the transformed
1724 block, do so. */
1725 if (this_tf.fallthru_label)
1727 /* This must be reached only if ndests == 0. */
1728 gimple *x = gimple_build_label (this_tf.fallthru_label);
1729 gimple_seq_add_stmt (&this_tf.top_p_seq, x);
1732 this_tf.dest_array.release ();
1733 free (this_tf.goto_queue);
1734 if (this_tf.goto_queue_map)
1735 delete this_tf.goto_queue_map;
1737 /* If there was an old (aka outer) eh_seq, append the current eh_seq.
1738 If there was no old eh_seq, then the append is trivially already done. */
1739 if (old_eh_seq)
1741 if (eh_seq == NULL)
1742 eh_seq = old_eh_seq;
1743 else
1745 gimple_seq new_eh_seq = eh_seq;
1746 eh_seq = old_eh_seq;
1747 gimple_seq_add_seq (&eh_seq, new_eh_seq);
1751 return this_tf.top_p_seq;
1754 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a
1755 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
 1756 exception region trees that record all the magic. */
1758 static gimple_seq
1759 lower_catch (struct leh_state *state, gtry *tp)
1761 eh_region try_region = NULL;
1762 struct leh_state this_state = *state;
1763 gimple_stmt_iterator gsi;
1764 tree out_label;
1765 gimple_seq new_seq, cleanup;
1766 gimple *x;
1767 location_t try_catch_loc = gimple_location (tp);
1769 if (flag_exceptions)
1771 try_region = gen_eh_region_try (state->cur_region);
1772 this_state.cur_region = try_region;
1775 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1777 if (!eh_region_may_contain_throw (try_region))
1778 return gimple_try_eval (tp);
1780 new_seq = NULL;
1781 emit_eh_dispatch (&new_seq, try_region);
1782 emit_resx (&new_seq, try_region);
1784 this_state.cur_region = state->cur_region;
1785 this_state.ehp_region = try_region;
1787 /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
1788 itself, so that e.g. for coverage purposes the nested cleanups don't
1789 appear before the cleanup body. See PR64634 for details. */
1790 gimple_seq old_eh_seq = eh_seq;
1791 eh_seq = NULL;
1793 out_label = NULL;
1794 cleanup = gimple_try_cleanup (tp);
1795 for (gsi = gsi_start (cleanup);
1796 !gsi_end_p (gsi);
1797 gsi_next (&gsi))
1799 eh_catch c;
1800 gcatch *catch_stmt;
1801 gimple_seq handler;
1803 catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
1804 c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));
1806 handler = gimple_catch_handler (catch_stmt);
1807 lower_eh_constructs_1 (&this_state, &handler);
1809 c->label = create_artificial_label (UNKNOWN_LOCATION);
1810 x = gimple_build_label (c->label);
1811 gimple_seq_add_stmt (&new_seq, x);
1813 gimple_seq_add_seq (&new_seq, handler);
1815 if (gimple_seq_may_fallthru (new_seq))
1817 if (!out_label)
1818 out_label = create_artificial_label (try_catch_loc);
1820 x = gimple_build_goto (out_label);
1821 gimple_seq_add_stmt (&new_seq, x);
1823 if (!c->type_list)
1824 break;
1827 gimple_try_set_cleanup (tp, new_seq);
1829 gimple_seq new_eh_seq = eh_seq;
1830 eh_seq = old_eh_seq;
1831 gimple_seq ret_seq = frob_into_branch_around (tp, try_region, out_label);
1832 gimple_seq_add_seq (&eh_seq, new_eh_seq);
1833 return ret_seq;
1836 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a
1837 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
1838 region trees that record all the magic. */
1840 static gimple_seq
1841 lower_eh_filter (struct leh_state *state, gtry *tp)
1843 struct leh_state this_state = *state;
1844 eh_region this_region = NULL;
1845 gimple *inner, *x;
1846 gimple_seq new_seq;
1848 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1850 if (flag_exceptions)
1852 this_region = gen_eh_region_allowed (state->cur_region,
1853 gimple_eh_filter_types (inner));
1854 this_state.cur_region = this_region;
1857 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1859 if (!eh_region_may_contain_throw (this_region))
1860 return gimple_try_eval (tp);
1862 new_seq = NULL;
1863 this_state.cur_region = state->cur_region;
1864 this_state.ehp_region = this_region;
1866 emit_eh_dispatch (&new_seq, this_region);
1867 emit_resx (&new_seq, this_region);
1869 this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
1870 x = gimple_build_label (this_region->u.allowed.label);
1871 gimple_seq_add_stmt (&new_seq, x);
1873 lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
1874 gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));
1876 gimple_try_set_cleanup (tp, new_seq);
1878 return frob_into_branch_around (tp, this_region, NULL);
1881 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with
 1882 a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
1883 plus the exception region trees that record all the magic. */
1885 static gimple_seq
1886 lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
1888 struct leh_state this_state = *state;
1890 if (flag_exceptions)
1892 gimple *inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1893 eh_region this_region;
1895 this_region = gen_eh_region_must_not_throw (state->cur_region);
1896 this_region->u.must_not_throw.failure_decl
1897 = gimple_eh_must_not_throw_fndecl (
1898 as_a <geh_mnt *> (inner));
1899 this_region->u.must_not_throw.failure_loc
1900 = LOCATION_LOCUS (gimple_location (tp));
1902 /* In order to get mangling applied to this decl, we must mark it
1903 used now. Otherwise, pass_ipa_free_lang_data won't think it
1904 needs to happen. */
1905 TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
1907 this_state.cur_region = this_region;
1910 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1912 return gimple_try_eval (tp);
1915 /* Implement a cleanup expression. This is similar to try-finally,
1916 except that we only execute the cleanup block for exception edges. */
1918 static gimple_seq
1919 lower_cleanup (struct leh_state *state, gtry *tp)
1921 struct leh_state this_state = *state;
1922 eh_region this_region = NULL;
1923 struct leh_tf_state fake_tf;
1924 gimple_seq result;
1925 bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
1927 if (flag_exceptions && !cleanup_dead)
1929 this_region = gen_eh_region_cleanup (state->cur_region);
1930 this_state.cur_region = this_region;
1933 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1935 if (cleanup_dead || !eh_region_may_contain_throw (this_region))
1936 return gimple_try_eval (tp);
1938 /* Build enough of a try-finally state so that we can reuse
1939 honor_protect_cleanup_actions. */
1940 memset (&fake_tf, 0, sizeof (fake_tf));
1941 fake_tf.top_p = fake_tf.try_finally_expr = tp;
1942 fake_tf.outer = state;
1943 fake_tf.region = this_region;
1944 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1945 fake_tf.may_throw = true;
1947 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1949 if (fake_tf.may_throw)
1951 /* In this case honor_protect_cleanup_actions had nothing to do,
1952 and we should process this normally. */
1953 lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
1954 result = frob_into_branch_around (tp, this_region,
1955 fake_tf.fallthru_label);
1957 else
1959 /* In this case honor_protect_cleanup_actions did nearly all of
1960 the work. All we have left is to append the fallthru_label. */
1962 result = gimple_try_eval (tp);
1963 if (fake_tf.fallthru_label)
1965 gimple *x = gimple_build_label (fake_tf.fallthru_label);
1966 gimple_seq_add_stmt (&result, x);
1969 return result;
1972 /* Main loop for lowering eh constructs. Also moves gsi to the next
1973 statement. */
1975 static void
1976 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1978 gimple_seq replace;
1979 gimple *x;
1980 gimple *stmt = gsi_stmt (*gsi);
1982 switch (gimple_code (stmt))
1984 case GIMPLE_CALL:
1986 tree fndecl = gimple_call_fndecl (stmt);
1987 tree rhs, lhs;
1989 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1990 switch (DECL_FUNCTION_CODE (fndecl))
1992 case BUILT_IN_EH_POINTER:
1993 /* The front end may have generated a call to
1994 __builtin_eh_pointer (0) within a catch region. Replace
1995 this zero argument with the current catch region number. */
1996 if (state->ehp_region)
1998 tree nr = build_int_cst (integer_type_node,
1999 state->ehp_region->index);
2000 gimple_call_set_arg (stmt, 0, nr);
2002 else
2004 /* The user has done something silly. Remove it. */
2005 rhs = null_pointer_node;
2006 goto do_replace;
2008 break;
2010 case BUILT_IN_EH_FILTER:
2011 /* ??? This should never appear, but since it's a builtin it
2012 is accessible to abuse by users. Just remove it and
2013 replace the use with the arbitrary value zero. */
2014 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
2015 do_replace:
2016 lhs = gimple_call_lhs (stmt);
2017 x = gimple_build_assign (lhs, rhs);
2018 gsi_insert_before (gsi, x, GSI_SAME_STMT);
2019 /* FALLTHRU */
2021 case BUILT_IN_EH_COPY_VALUES:
2022 /* Likewise this should not appear. Remove it. */
2023 gsi_remove (gsi, true);
2024 return;
2026 default:
2027 break;
2030 /* FALLTHRU */
2032 case GIMPLE_ASSIGN:
2033 /* If the stmt can throw use a new temporary for the assignment
2034 to a LHS. This makes sure the old value of the LHS is
2035 available on the EH edge. Only do so for statements that
2036 potentially fall through (no noreturn calls e.g.), otherwise
2037 this new assignment might create fake fallthru regions. */
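/* Illustrative sketch of the rewrite done below (names invented):

     lhs_1 = call_that_may_throw ();

   becomes

     tmp_2 = call_that_may_throw ();
     lhs_1 = tmp_2;   <-- executed only on the fallthru path

   so on the EH edge out of the call LHS_1 still holds its old value. */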
2038 if (stmt_could_throw_p (stmt)
2039 && gimple_has_lhs (stmt)
2040 && gimple_stmt_may_fallthru (stmt)
2041 && !tree_could_throw_p (gimple_get_lhs (stmt))
2042 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
2044 tree lhs = gimple_get_lhs (stmt);
2045 tree tmp = create_tmp_var (TREE_TYPE (lhs));
2046 gimple *s = gimple_build_assign (lhs, tmp);
2047 gimple_set_location (s, gimple_location (stmt));
2048 gimple_set_block (s, gimple_block (stmt));
2049 gimple_set_lhs (stmt, tmp);
2050 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
2051 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
2052 DECL_GIMPLE_REG_P (tmp) = 1;
2053 gsi_insert_after (gsi, s, GSI_SAME_STMT);
2055 /* Look for things that can throw exceptions, and record them. */
2056 if (state->cur_region && stmt_could_throw_p (stmt))
2058 record_stmt_eh_region (state->cur_region, stmt);
2059 note_eh_region_may_contain_throw (state->cur_region);
2061 break;
2063 case GIMPLE_COND:
2064 case GIMPLE_GOTO:
2065 case GIMPLE_RETURN:
2066 maybe_record_in_goto_queue (state, stmt);
2067 break;
2069 case GIMPLE_SWITCH:
2070 verify_norecord_switch_expr (state, as_a <gswitch *> (stmt));
2071 break;
2073 case GIMPLE_TRY:
2075 gtry *try_stmt = as_a <gtry *> (stmt);
2076 if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2077 replace = lower_try_finally (state, try_stmt);
2078 else
2080 x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt));
2081 if (!x)
2083 replace = gimple_try_eval (try_stmt);
2084 lower_eh_constructs_1 (state, &replace);
2086 else
2087 switch (gimple_code (x))
2089 case GIMPLE_CATCH:
2090 replace = lower_catch (state, try_stmt);
2091 break;
2092 case GIMPLE_EH_FILTER:
2093 replace = lower_eh_filter (state, try_stmt);
2094 break;
2095 case GIMPLE_EH_MUST_NOT_THROW:
2096 replace = lower_eh_must_not_throw (state, try_stmt);
2097 break;
2098 case GIMPLE_EH_ELSE:
2099 /* This code is only valid with GIMPLE_TRY_FINALLY. */
2100 gcc_unreachable ();
2101 default:
2102 replace = lower_cleanup (state, try_stmt);
2103 break;
2108 /* Remove the old stmt and insert the transformed sequence
2109 instead. */
2110 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
2111 gsi_remove (gsi, true);
2113 /* Return since we don't want gsi_next () */
2114 return;
2116 case GIMPLE_EH_ELSE:
2117 /* We should be eliminating this in lower_try_finally et al. */
2118 gcc_unreachable ();
2120 default:
2121 /* A type, a decl, or some kind of statement that we're not
2122 interested in. Don't walk them. */
2123 break;
2126 gsi_next (gsi);
2129 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
2131 static void
2132 lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
2134 gimple_stmt_iterator gsi;
2135 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
2136 lower_eh_constructs_2 (state, &gsi);
2139 namespace {
2141 const pass_data pass_data_lower_eh =
2143 GIMPLE_PASS, /* type */
2144 "eh", /* name */
2145 OPTGROUP_NONE, /* optinfo_flags */
2146 TV_TREE_EH, /* tv_id */
2147 PROP_gimple_lcf, /* properties_required */
2148 PROP_gimple_leh, /* properties_provided */
2149 0, /* properties_destroyed */
2150 0, /* todo_flags_start */
2151 0, /* todo_flags_finish */
2154 class pass_lower_eh : public gimple_opt_pass
2156 public:
2157 pass_lower_eh (gcc::context *ctxt)
2158 : gimple_opt_pass (pass_data_lower_eh, ctxt)
2161 /* opt_pass methods: */
2162 virtual unsigned int execute (function *);
2164 }; // class pass_lower_eh
2166 unsigned int
2167 pass_lower_eh::execute (function *fun)
2169 struct leh_state null_state;
2170 gimple_seq bodyp;
2172 bodyp = gimple_body (current_function_decl);
2173 if (bodyp == NULL)
2174 return 0;
2176 finally_tree = new hash_table<finally_tree_hasher> (31);
2177 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
2178 memset (&null_state, 0, sizeof (null_state));
2180 collect_finally_tree_1 (bodyp, NULL);
2181 lower_eh_constructs_1 (&null_state, &bodyp);
2182 gimple_set_body (current_function_decl, bodyp);
2184 /* We assume there's a return statement, or something, at the end of
2185 the function, and thus plopping the EH sequence afterward won't
2186 change anything. */
2187 gcc_assert (!gimple_seq_may_fallthru (bodyp));
2188 gimple_seq_add_seq (&bodyp, eh_seq);
2190 /* We assume that since BODYP already existed, adding EH_SEQ to it
2191 didn't change its value, and we don't have to re-set the function. */
2192 gcc_assert (bodyp == gimple_body (current_function_decl));
2194 delete finally_tree;
2195 finally_tree = NULL;
2196 BITMAP_FREE (eh_region_may_contain_throw_map);
2197 eh_seq = NULL;
2199 /* If this function needs a language specific EH personality routine
2200 and the frontend didn't already set one, do so now. */
2201 if (function_needs_eh_personality (fun) == eh_personality_lang
2202 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2203 DECL_FUNCTION_PERSONALITY (current_function_decl)
2204 = lang_hooks.eh_personality ();
2206 return 0;
2209 } // anon namespace
2211 gimple_opt_pass *
2212 make_pass_lower_eh (gcc::context *ctxt)
2214 return new pass_lower_eh (ctxt);
2217 /* Create the multiple edges from an EH_DISPATCH statement to all of
2218 the possible handlers for its EH region. Return true if there's
2219 no fallthru edge; false if there is. */
2221 bool
2222 make_eh_dispatch_edges (geh_dispatch *stmt)
2224 eh_region r;
2225 eh_catch c;
2226 basic_block src, dst;
2228 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2229 src = gimple_bb (stmt);
2231 switch (r->type)
2233 case ERT_TRY:
2234 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2236 dst = label_to_block (c->label);
2237 make_edge (src, dst, 0);
2239 /* A catch-all handler doesn't have a fallthru. */
2240 if (c->type_list == NULL)
2241 return false;
2243 break;
2245 case ERT_ALLOWED_EXCEPTIONS:
2246 dst = label_to_block (r->u.allowed.label);
2247 make_edge (src, dst, 0);
2248 break;
2250 default:
2251 gcc_unreachable ();
2254 return true;
2257 /* Create the single EH edge from STMT to its nearest landing pad,
2258 if there is such a landing pad within the current function. */
2260 void
2261 make_eh_edges (gimple *stmt)
2263 basic_block src, dst;
2264 eh_landing_pad lp;
2265 int lp_nr;
2267 lp_nr = lookup_stmt_eh_lp (stmt);
2268 if (lp_nr <= 0)
2269 return;
2271 lp = get_eh_landing_pad_from_number (lp_nr);
2272 gcc_assert (lp != NULL);
2274 src = gimple_bb (stmt);
2275 dst = label_to_block (lp->post_landing_pad);
2276 make_edge (src, dst, EDGE_EH);
2279 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2280 do not actually perform the final edge redirection.
2282 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2283 we intend to change the destination EH region as well; this means
2284 EH_LANDING_PAD_NR must already be set on the destination block label.
2285 If false, we're being called from generic cfg manipulation code and we
2286 should preserve our place within the region tree. */
2288 static void
2289 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2291 eh_landing_pad old_lp, new_lp;
2292 basic_block old_bb;
2293 gimple *throw_stmt;
2294 int old_lp_nr, new_lp_nr;
2295 tree old_label, new_label;
2296 edge_iterator ei;
2297 edge e;
2299 old_bb = edge_in->dest;
2300 old_label = gimple_block_label (old_bb);
2301 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2302 gcc_assert (old_lp_nr > 0);
2303 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2305 throw_stmt = last_stmt (edge_in->src);
2306 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2308 new_label = gimple_block_label (new_bb);
2310 /* Look for an existing region that might be using NEW_BB already. */
2311 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2312 if (new_lp_nr)
2314 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2315 gcc_assert (new_lp);
2317 /* Unless CHANGE_REGION is true, the new and old landing pad
2318 had better be associated with the same EH region. */
2319 gcc_assert (change_region || new_lp->region == old_lp->region);
2321 else
2323 new_lp = NULL;
2324 gcc_assert (!change_region);
2327 /* Notice when we redirect the last EH edge away from OLD_BB. */
2328 FOR_EACH_EDGE (e, ei, old_bb->preds)
2329 if (e != edge_in && (e->flags & EDGE_EH))
2330 break;
2332 if (new_lp)
2334 /* NEW_LP already exists. If there are still edges into OLD_LP,
2335 there's nothing to do with the EH tree. If there are no more
2336 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2337 If CHANGE_REGION is true, then our caller is expecting to remove
2338 the landing pad. */
2339 if (e == NULL && !change_region)
2340 remove_eh_landing_pad (old_lp);
2342 else
2344 /* No correct landing pad exists. If there are no more edges
2345 into OLD_LP, then we can simply re-use the existing landing pad.
2346 Otherwise, we have to create a new landing pad. */
2347 if (e == NULL)
2349 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2350 new_lp = old_lp;
2352 else
2353 new_lp = gen_eh_landing_pad (old_lp->region);
2354 new_lp->post_landing_pad = new_label;
2355 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2358 /* Maybe move the throwing statement to the new region. */
2359 if (old_lp != new_lp)
2361 remove_stmt_from_eh_lp (throw_stmt);
2362 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2366 /* Redirect EH edge E to NEW_BB. */
2368 edge
2369 redirect_eh_edge (edge edge_in, basic_block new_bb)
2371 redirect_eh_edge_1 (edge_in, new_bb, false);
2372 return ssa_redirect_edge (edge_in, new_bb);
2375 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2376 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2377 The actual edge update will happen in the caller. */
2379 void
2380 redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
2382 tree new_lab = gimple_block_label (new_bb);
2383 bool any_changed = false;
2384 basic_block old_bb;
2385 eh_region r;
2386 eh_catch c;
2388 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2389 switch (r->type)
2391 case ERT_TRY:
2392 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2394 old_bb = label_to_block (c->label);
2395 if (old_bb == e->dest)
2397 c->label = new_lab;
2398 any_changed = true;
2401 break;
2403 case ERT_ALLOWED_EXCEPTIONS:
2404 old_bb = label_to_block (r->u.allowed.label);
2405 gcc_assert (old_bb == e->dest);
2406 r->u.allowed.label = new_lab;
2407 any_changed = true;
2408 break;
2410 default:
2411 gcc_unreachable ();
2414 gcc_assert (any_changed);
2417 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2419 bool
2420 operation_could_trap_helper_p (enum tree_code op,
2421 bool fp_operation,
2422 bool honor_trapv,
2423 bool honor_nans,
2424 bool honor_snans,
2425 tree divisor,
2426 bool *handled)
2428 *handled = true;
2429 switch (op)
2431 case TRUNC_DIV_EXPR:
2432 case CEIL_DIV_EXPR:
2433 case FLOOR_DIV_EXPR:
2434 case ROUND_DIV_EXPR:
2435 case EXACT_DIV_EXPR:
2436 case CEIL_MOD_EXPR:
2437 case FLOOR_MOD_EXPR:
2438 case ROUND_MOD_EXPR:
2439 case TRUNC_MOD_EXPR:
2440 case RDIV_EXPR:
2441 if (honor_snans || honor_trapv)
2442 return true;
2443 if (fp_operation)
2444 return flag_trapping_math;
2445 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2446 return true;
2447 return false;
2449 case LT_EXPR:
2450 case LE_EXPR:
2451 case GT_EXPR:
2452 case GE_EXPR:
2453 case LTGT_EXPR:
2454 /* Some floating point comparisons may trap. */
2455 return honor_nans;
2457 case EQ_EXPR:
2458 case NE_EXPR:
2459 case UNORDERED_EXPR:
2460 case ORDERED_EXPR:
2461 case UNLT_EXPR:
2462 case UNLE_EXPR:
2463 case UNGT_EXPR:
2464 case UNGE_EXPR:
2465 case UNEQ_EXPR:
2466 return honor_snans;
2468 case NEGATE_EXPR:
2469 case ABS_EXPR:
2470 case CONJ_EXPR:
2471 /* These operations don't trap with floating point. */
2472 if (honor_trapv)
2473 return true;
2474 return false;
2476 case PLUS_EXPR:
2477 case MINUS_EXPR:
2478 case MULT_EXPR:
2479 /* Any floating arithmetic may trap. */
2480 if (fp_operation && flag_trapping_math)
2481 return true;
2482 if (honor_trapv)
2483 return true;
2484 return false;
2486 case COMPLEX_EXPR:
2487 case CONSTRUCTOR:
2488 /* Constructing an object cannot trap. */
2489 return false;
2491 default:
2492 /* Any floating arithmetic may trap. */
2493 if (fp_operation && flag_trapping_math)
2494 return true;
2496 *handled = false;
2497 return false;
2501 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2502 on floating-point values. HONOR_TRAPV is true if OP is applied on integer
2503 type operands that may trap. If OP is a division operator, DIVISOR contains
2504 the value of the divisor. */
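/* For instance (an illustrative use, not a call site in this file),
   operation_could_trap_p (TRUNC_DIV_EXPR, false, false, integer_zero_node)
   returns true because the divisor is zero, whereas for a floating-point
   operation_could_trap_p (PLUS_EXPR, true, false, NULL_TREE) the answer
   depends on flag_trapping_math. */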
2506 bool
2507 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2508 tree divisor)
2510 bool honor_nans = (fp_operation && flag_trapping_math
2511 && !flag_finite_math_only);
2512 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2513 bool handled;
2515 if (TREE_CODE_CLASS (op) != tcc_comparison
2516 && TREE_CODE_CLASS (op) != tcc_unary
2517 && TREE_CODE_CLASS (op) != tcc_binary
2518 && op != FMA_EXPR)
2519 return false;
2521 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2522 honor_nans, honor_snans, divisor,
2523 &handled);
2527 /* Returns true if it is possible to prove that the index of
2528 an array access REF (an ARRAY_REF expression) falls into the
2529 array bounds. */
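/* For example, given "int a[10];" the access a[3] has a constant index
   inside [0, 9] and this returns true, while a[i_5] (non-constant index)
   or a[42] (out-of-range index) yield false. */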
2531 static bool
2532 in_array_bounds_p (tree ref)
2534 tree idx = TREE_OPERAND (ref, 1);
2535 tree min, max;
2537 if (TREE_CODE (idx) != INTEGER_CST)
2538 return false;
2540 min = array_ref_low_bound (ref);
2541 max = array_ref_up_bound (ref);
2542 if (!min
2543 || !max
2544 || TREE_CODE (min) != INTEGER_CST
2545 || TREE_CODE (max) != INTEGER_CST)
2546 return false;
2548 if (tree_int_cst_lt (idx, min)
2549 || tree_int_cst_lt (max, idx))
2550 return false;
2552 return true;
2555 /* Returns true if it is possible to prove that the range of
2556 an array access REF (an ARRAY_RANGE_REF expression) falls
2557 into the array bounds. */
2559 static bool
2560 range_in_array_bounds_p (tree ref)
2562 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
2563 tree range_min, range_max, min, max;
2565 range_min = TYPE_MIN_VALUE (domain_type);
2566 range_max = TYPE_MAX_VALUE (domain_type);
2567 if (!range_min
2568 || !range_max
2569 || TREE_CODE (range_min) != INTEGER_CST
2570 || TREE_CODE (range_max) != INTEGER_CST)
2571 return false;
2573 min = array_ref_low_bound (ref);
2574 max = array_ref_up_bound (ref);
2575 if (!min
2576 || !max
2577 || TREE_CODE (min) != INTEGER_CST
2578 || TREE_CODE (max) != INTEGER_CST)
2579 return false;
2581 if (tree_int_cst_lt (range_min, min)
2582 || tree_int_cst_lt (max, range_max))
2583 return false;
2585 return true;
2588 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2589 location or floating point arithmetic. Cf. the rtl version, may_trap_p.
2590 This routine expects only GIMPLE lhs or rhs input. */
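/* A few illustrative cases: "*p_1" (an INDIRECT_REF without
   TREE_THIS_NOTRAP set) could trap; "a[3]" with "int a[10]" could not;
   a floating-point division could trap when -ftrapping-math is in
   effect. */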
2592 bool
2593 tree_could_trap_p (tree expr)
2595 enum tree_code code;
2596 bool fp_operation = false;
2597 bool honor_trapv = false;
2598 tree t, base, div = NULL_TREE;
2600 if (!expr)
2601 return false;
2603 code = TREE_CODE (expr);
2604 t = TREE_TYPE (expr);
2606 if (t)
2608 if (COMPARISON_CLASS_P (expr))
2609 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2610 else
2611 fp_operation = FLOAT_TYPE_P (t);
2612 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2615 if (TREE_CODE_CLASS (code) == tcc_binary)
2616 div = TREE_OPERAND (expr, 1);
2617 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2618 return true;
2620 restart:
2621 switch (code)
2623 case COMPONENT_REF:
2624 case REALPART_EXPR:
2625 case IMAGPART_EXPR:
2626 case BIT_FIELD_REF:
2627 case VIEW_CONVERT_EXPR:
2628 case WITH_SIZE_EXPR:
2629 expr = TREE_OPERAND (expr, 0);
2630 code = TREE_CODE (expr);
2631 goto restart;
2633 case ARRAY_RANGE_REF:
2634 base = TREE_OPERAND (expr, 0);
2635 if (tree_could_trap_p (base))
2636 return true;
2637 if (TREE_THIS_NOTRAP (expr))
2638 return false;
2639 return !range_in_array_bounds_p (expr);
2641 case ARRAY_REF:
2642 base = TREE_OPERAND (expr, 0);
2643 if (tree_could_trap_p (base))
2644 return true;
2645 if (TREE_THIS_NOTRAP (expr))
2646 return false;
2647 return !in_array_bounds_p (expr);
2649 case TARGET_MEM_REF:
2650 case MEM_REF:
2651 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
2652 && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
2653 return true;
2654 if (TREE_THIS_NOTRAP (expr))
2655 return false;
2656 /* We cannot prove that the access is in-bounds when we have
2657 variable-index TARGET_MEM_REFs. */
2658 if (code == TARGET_MEM_REF
2659 && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
2660 return true;
2661 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2663 tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2664 offset_int off = mem_ref_offset (expr);
2665 if (wi::neg_p (off, SIGNED))
2666 return true;
2667 if (TREE_CODE (base) == STRING_CST)
2668 return wi::leu_p (TREE_STRING_LENGTH (base), off);
2669 else if (DECL_SIZE_UNIT (base) == NULL_TREE
2670 || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST
2671 || wi::leu_p (wi::to_offset (DECL_SIZE_UNIT (base)), off))
2672 return true;
2673 /* Now we are sure the first byte of the access is inside
2674 the object. */
2675 return false;
2677 return true;
2679 case INDIRECT_REF:
2680 return !TREE_THIS_NOTRAP (expr);
2682 case ASM_EXPR:
2683 return TREE_THIS_VOLATILE (expr);
2685 case CALL_EXPR:
2686 t = get_callee_fndecl (expr);
2687 /* Assume that calls to weak functions may trap. */
2688 if (!t || !DECL_P (t))
2689 return true;
2690 if (DECL_WEAK (t))
2691 return tree_could_trap_p (t);
2692 return false;
2694 case FUNCTION_DECL:
2695 /* Assume that accesses to weak functions may trap, unless we know
2696 they are certainly defined in current TU or in some other
2697 LTO partition. */
2698 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
2700 cgraph_node *node = cgraph_node::get (expr);
2701 if (node)
2702 node = node->function_symbol ();
2703 return !(node && node->in_other_partition);
2705 return false;
2707 case VAR_DECL:
2708 /* Assume that accesses to weak vars may trap, unless we know
2709 they are certainly defined in current TU or in some other
2710 LTO partition. */
2711 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
2713 varpool_node *node = varpool_node::get (expr);
2714 if (node)
2715 node = node->ultimate_alias_target ();
2716 return !(node && node->in_other_partition);
2718 return false;
2720 default:
2721 return false;
2726 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2727 an assignment or a conditional) may throw. */
2729 static bool
2730 stmt_could_throw_1_p (gassign *stmt)
2732 enum tree_code code = gimple_assign_rhs_code (stmt);
2733 bool honor_nans = false;
2734 bool honor_snans = false;
2735 bool fp_operation = false;
2736 bool honor_trapv = false;
2737 tree t;
2738 size_t i;
2739 bool handled, ret;
2741 if (TREE_CODE_CLASS (code) == tcc_comparison
2742 || TREE_CODE_CLASS (code) == tcc_unary
2743 || TREE_CODE_CLASS (code) == tcc_binary
2744 || code == FMA_EXPR)
2746 if (TREE_CODE_CLASS (code) == tcc_comparison)
2747 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2748 else
2749 t = gimple_expr_type (stmt);
2750 fp_operation = FLOAT_TYPE_P (t);
2751 if (fp_operation)
2753 honor_nans = flag_trapping_math && !flag_finite_math_only;
2754 honor_snans = flag_signaling_nans != 0;
2756 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2757 honor_trapv = true;
2760 /* First check the LHS. */
2761 if (tree_could_trap_p (gimple_assign_lhs (stmt)))
2762 return true;
2764 /* Check if the main expression may trap. */
2765 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2766 honor_nans, honor_snans,
2767 gimple_assign_rhs2 (stmt),
2768 &handled);
2769 if (handled)
2770 return ret;
2772 /* If the expression does not trap, see if any of the individual operands may
2773 trap. */
2774 for (i = 1; i < gimple_num_ops (stmt); i++)
2775 if (tree_could_trap_p (gimple_op (stmt, i)))
2776 return true;
2778 return false;
2782 /* Return true if statement STMT could throw an exception. */
2784 bool
2785 stmt_could_throw_p (gimple *stmt)
2787 if (!flag_exceptions)
2788 return false;
2790 /* The only statements that can throw an exception are assignments,
2791 conditionals, calls, resx, and asms. */
2792 switch (gimple_code (stmt))
2794 case GIMPLE_RESX:
2795 return true;
2797 case GIMPLE_CALL:
2798 return !gimple_call_nothrow_p (as_a <gcall *> (stmt));
2800 case GIMPLE_COND:
2802 if (!cfun->can_throw_non_call_exceptions)
2803 return false;
2804 gcond *cond = as_a <gcond *> (stmt);
2805 tree lhs = gimple_cond_lhs (cond);
2806 return operation_could_trap_p (gimple_cond_code (cond),
2807 FLOAT_TYPE_P (TREE_TYPE (lhs)),
2808 false, NULL_TREE);
2811 case GIMPLE_ASSIGN:
2812 if (!cfun->can_throw_non_call_exceptions
2813 || gimple_clobber_p (stmt))
2814 return false;
2815 return stmt_could_throw_1_p (as_a <gassign *> (stmt));
2817 case GIMPLE_ASM:
2818 if (!cfun->can_throw_non_call_exceptions)
2819 return false;
2820 return gimple_asm_volatile_p (as_a <gasm *> (stmt));
2822 default:
2823 return false;
2828 /* Return true if expression T could throw an exception. */
2830 bool
2831 tree_could_throw_p (tree t)
2833 if (!flag_exceptions)
2834 return false;
2835 if (TREE_CODE (t) == MODIFY_EXPR)
2837 if (cfun->can_throw_non_call_exceptions
2838 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2839 return true;
2840 t = TREE_OPERAND (t, 1);
2843 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2844 t = TREE_OPERAND (t, 0);
2845 if (TREE_CODE (t) == CALL_EXPR)
2846 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2847 if (cfun->can_throw_non_call_exceptions)
2848 return tree_could_trap_p (t);
2849 return false;
2852 /* Return true if STMT can throw an exception that is not caught within
2853 the current function (CFUN). */
2855 bool
2856 stmt_can_throw_external (gimple *stmt)
2858 int lp_nr;
2860 if (!stmt_could_throw_p (stmt))
2861 return false;
2863 lp_nr = lookup_stmt_eh_lp (stmt);
2864 return lp_nr == 0;
2867 /* Return true if STMT can throw an exception that is caught within
2868 the current function (CFUN). */
2870 bool
2871 stmt_can_throw_internal (gimple *stmt)
2873 int lp_nr;
2875 if (!stmt_could_throw_p (stmt))
2876 return false;
2878 lp_nr = lookup_stmt_eh_lp (stmt);
2879 return lp_nr > 0;
2882 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2883 remove any entry it might have from the EH table. Return true if
2884 any change was made. */
2886 bool
2887 maybe_clean_eh_stmt_fn (struct function *ifun, gimple *stmt)
2889 if (stmt_could_throw_p (stmt))
2890 return false;
2891 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2894 /* Likewise, but always use the current function. */
2896 bool
2897 maybe_clean_eh_stmt (gimple *stmt)
2899 return maybe_clean_eh_stmt_fn (cfun, stmt);
2902 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2903 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2904 in the table if it should be in there. Return TRUE if a replacement was
2905 done that may require an EH edge purge. */
2907 bool
2908 maybe_clean_or_replace_eh_stmt (gimple *old_stmt, gimple *new_stmt)
2910 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2912 if (lp_nr != 0)
2914 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2916 if (new_stmt == old_stmt && new_stmt_could_throw)
2917 return false;
2919 remove_stmt_from_eh_lp (old_stmt);
2920 if (new_stmt_could_throw)
2922 add_stmt_to_eh_lp (new_stmt, lp_nr);
2923 return false;
2925 else
2926 return true;
2929 return false;
2932 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2933 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2934 operand is the return value of duplicate_eh_regions. */
2936 bool
2937 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple *new_stmt,
2938 struct function *old_fun, gimple *old_stmt,
2939 hash_map<void *, void *> *map,
2940 int default_lp_nr)
2942 int old_lp_nr, new_lp_nr;
2944 if (!stmt_could_throw_p (new_stmt))
2945 return false;
2947 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2948 if (old_lp_nr == 0)
2950 if (default_lp_nr == 0)
2951 return false;
2952 new_lp_nr = default_lp_nr;
2954 else if (old_lp_nr > 0)
2956 eh_landing_pad old_lp, new_lp;
2958 old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
2959 new_lp = static_cast<eh_landing_pad> (*map->get (old_lp));
2960 new_lp_nr = new_lp->index;
2962 else
2964 eh_region old_r, new_r;
2966 old_r = (*old_fun->eh->region_array)[-old_lp_nr];
2967 new_r = static_cast<eh_region> (*map->get (old_r));
2968 new_lp_nr = -new_r->index;
2971 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2972 return true;
2975 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2976 and thus no remapping is required. */
2978 bool
2979 maybe_duplicate_eh_stmt (gimple *new_stmt, gimple *old_stmt)
2981 int lp_nr;
2983 if (!stmt_could_throw_p (new_stmt))
2984 return false;
2986 lp_nr = lookup_stmt_eh_lp (old_stmt);
2987 if (lp_nr == 0)
2988 return false;
2990 add_stmt_to_eh_lp (new_stmt, lp_nr);
2991 return true;
2994 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2995 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2996 this only handles handlers consisting of a single call, as that's the
2997 important case for C++: a destructor call for a particular object showing
2998 up in multiple handlers. */
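/* For example, two cleanup sequences each consisting of the single call
   "Foo::~Foo (&f);" with identical arguments, no LHS and no static chain
   compare equal here; the class and variable names are of course only
   illustrative. */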
3000 static bool
3001 same_handler_p (gimple_seq oneh, gimple_seq twoh)
3003 gimple_stmt_iterator gsi;
3004 gimple *ones, *twos;
3005 unsigned int ai;
3007 gsi = gsi_start (oneh);
3008 if (!gsi_one_before_end_p (gsi))
3009 return false;
3010 ones = gsi_stmt (gsi);
3012 gsi = gsi_start (twoh);
3013 if (!gsi_one_before_end_p (gsi))
3014 return false;
3015 twos = gsi_stmt (gsi);
3017 if (!is_gimple_call (ones)
3018 || !is_gimple_call (twos)
3019 || gimple_call_lhs (ones)
3020 || gimple_call_lhs (twos)
3021 || gimple_call_chain (ones)
3022 || gimple_call_chain (twos)
3023 || !gimple_call_same_target_p (ones, twos)
3024 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
3025 return false;
3027 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
3028 if (!operand_equal_p (gimple_call_arg (ones, ai),
3029 gimple_call_arg (twos, ai), 0))
3030 return false;
3032 return true;
3035 /* Optimize
3036 try { A() } finally { try { ~B() } catch { ~A() } }
3037 try { ... } finally { ~A() }
3038 into
3039 try { A() } catch { ~B() }
3040 try { ~B() ... } finally { ~A() }
3042 This occurs frequently in C++, where A is a local variable and B is a
3043 temporary used in the initializer for A. */
3045 static void
3046 optimize_double_finally (gtry *one, gtry *two)
3048 gimple *oneh;
3049 gimple_stmt_iterator gsi;
3050 gimple_seq cleanup;
3052 cleanup = gimple_try_cleanup (one);
3053 gsi = gsi_start (cleanup);
3054 if (!gsi_one_before_end_p (gsi))
3055 return;
3057 oneh = gsi_stmt (gsi);
3058 if (gimple_code (oneh) != GIMPLE_TRY
3059 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
3060 return;
3062 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
3064 gimple_seq seq = gimple_try_eval (oneh);
3066 gimple_try_set_cleanup (one, seq);
3067 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
3068 seq = copy_gimple_seq_and_replace_locals (seq);
3069 gimple_seq_add_seq (&seq, gimple_try_eval (two));
3070 gimple_try_set_eval (two, seq);
3074 /* Perform EH refactoring optimizations that are simpler to do when code
3075 flow has been lowered but EH structures haven't. */
3077 static void
3078 refactor_eh_r (gimple_seq seq)
3080 gimple_stmt_iterator gsi;
3081 gimple *one, *two;
3083 one = NULL;
3084 two = NULL;
3085 gsi = gsi_start (seq);
3086 while (1)
3088 one = two;
3089 if (gsi_end_p (gsi))
3090 two = NULL;
3091 else
3092 two = gsi_stmt (gsi);
3093 if (one && two)
3094 if (gtry *try_one = dyn_cast <gtry *> (one))
3095 if (gtry *try_two = dyn_cast <gtry *> (two))
3096 if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
3097 && gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
3098 optimize_double_finally (try_one, try_two);
3099 if (one)
3100 switch (gimple_code (one))
3102 case GIMPLE_TRY:
3103 refactor_eh_r (gimple_try_eval (one));
3104 refactor_eh_r (gimple_try_cleanup (one));
3105 break;
3106 case GIMPLE_CATCH:
3107 refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
3108 break;
3109 case GIMPLE_EH_FILTER:
3110 refactor_eh_r (gimple_eh_filter_failure (one));
3111 break;
3112 case GIMPLE_EH_ELSE:
3114 geh_else *eh_else_stmt = as_a <geh_else *> (one);
3115 refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
3116 refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
3118 break;
3119 default:
3120 break;
3122 if (two)
3123 gsi_next (&gsi);
3124 else
3125 break;
3129 namespace {
3131 const pass_data pass_data_refactor_eh =
3133 GIMPLE_PASS, /* type */
3134 "ehopt", /* name */
3135 OPTGROUP_NONE, /* optinfo_flags */
3136 TV_TREE_EH, /* tv_id */
3137 PROP_gimple_lcf, /* properties_required */
3138 0, /* properties_provided */
3139 0, /* properties_destroyed */
3140 0, /* todo_flags_start */
3141 0, /* todo_flags_finish */
3144 class pass_refactor_eh : public gimple_opt_pass
3146 public:
3147 pass_refactor_eh (gcc::context *ctxt)
3148 : gimple_opt_pass (pass_data_refactor_eh, ctxt)
3151 /* opt_pass methods: */
3152 virtual bool gate (function *) { return flag_exceptions != 0; }
3153 virtual unsigned int execute (function *)
3155 refactor_eh_r (gimple_body (current_function_decl));
3156 return 0;
3159 }; // class pass_refactor_eh
3161 } // anon namespace
3163 gimple_opt_pass *
3164 make_pass_refactor_eh (gcc::context *ctxt)
3166 return new pass_refactor_eh (ctxt);
3169 /* At the end of gimple optimization, we can lower RESX. */
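/* Sketching the three possible outcomes roughly: with no source region
   the RESX degenerates into a __builtin_trap () call; with a destination
   region inside the function the exception is transferred with
   __builtin_eh_copy_values and the EH edge becomes a fallthru; otherwise
   the exception escapes, e.g.

     resx 2;   -->   _1 = __builtin_eh_pointer (2);
                     __builtin_unwind_resume (_1);

   (with __cxa_end_cleanup used instead on ARM EABI). */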
3171 static bool
3172 lower_resx (basic_block bb, gresx *stmt,
3173 hash_map<eh_region, tree> *mnt_map)
3175 int lp_nr;
3176 eh_region src_r, dst_r;
3177 gimple_stmt_iterator gsi;
3178 gimple *x;
3179 tree fn, src_nr;
3180 bool ret = false;
3182 lp_nr = lookup_stmt_eh_lp (stmt);
3183 if (lp_nr != 0)
3184 dst_r = get_eh_region_from_lp_number (lp_nr);
3185 else
3186 dst_r = NULL;
3188 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
3189 gsi = gsi_last_bb (bb);
3191 if (src_r == NULL)
3193 /* We can wind up with no source region when pass_cleanup_eh shows
3194 that there are no entries into an eh region and deletes it, but
3195 then the block that contains the resx isn't removed. This can
3196 happen without optimization when the switch statement created by
3197 lower_try_finally_switch isn't simplified to remove the eh case.
3199 Resolve this by expanding the resx node to an abort. */
3201 fn = builtin_decl_implicit (BUILT_IN_TRAP);
3202 x = gimple_build_call (fn, 0);
3203 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3205 while (EDGE_COUNT (bb->succs) > 0)
3206 remove_edge (EDGE_SUCC (bb, 0));
3208 else if (dst_r)
3210 /* When we have a destination region, we resolve this by copying
3211 the excptr and filter values into place, and changing the edge
3212 to immediately after the landing pad. */
3213 edge e;
3215 if (lp_nr < 0)
3217 basic_block new_bb;
3218 tree lab;
3220 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
3221 the failure decl into a new block, if needed. */
3222 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
3224 tree *slot = mnt_map->get (dst_r);
3225 if (slot == NULL)
3227 gimple_stmt_iterator gsi2;
3229 new_bb = create_empty_bb (bb);
3230 add_bb_to_loop (new_bb, bb->loop_father);
3231 lab = gimple_block_label (new_bb);
3232 gsi2 = gsi_start_bb (new_bb);
3234 fn = dst_r->u.must_not_throw.failure_decl;
3235 x = gimple_build_call (fn, 0);
3236 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3237 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3239 mnt_map->put (dst_r, lab);
3241 else
3243 lab = *slot;
3244 new_bb = label_to_block (lab);
3247 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3248 e = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
3250 else
3252 edge_iterator ei;
3253 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
3255 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
3256 src_nr = build_int_cst (integer_type_node, src_r->index);
3257 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3258 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3260 /* Update the flags for the outgoing edge. */
3261 e = single_succ_edge (bb);
3262 gcc_assert (e->flags & EDGE_EH);
3263 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3264 e->probability = profile_probability::always ();
3265 e->count = bb->count;
3267 /* If there are no more EH users of the landing pad, delete it. */
3268 FOR_EACH_EDGE (e, ei, e->dest->preds)
3269 if (e->flags & EDGE_EH)
3270 break;
3271 if (e == NULL)
3273 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3274 remove_eh_landing_pad (lp);
3278 ret = true;
3280 else
3282 tree var;
3284 /* When we don't have a destination region, this exception escapes
3285 up the call chain. We resolve this by generating a call to the
3286 _Unwind_Resume library function. */
3288 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3289 with no arguments for C++ and Java. Check for that. */
3290 if (src_r->use_cxa_end_cleanup)
3292 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3293 x = gimple_build_call (fn, 0);
3294 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3296 else
3298 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3299 src_nr = build_int_cst (integer_type_node, src_r->index);
3300 x = gimple_build_call (fn, 1, src_nr);
3301 var = create_tmp_var (ptr_type_node);
3302 var = make_ssa_name (var, x);
3303 gimple_call_set_lhs (x, var);
3304 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3306 /* When exception handling is delegated to a caller function, we
3307 have to guarantee that shadow memory variables living on stack
3308 will be cleaned up before control is given to a parent function. */
3309 if (sanitize_flags_p (SANITIZE_ADDRESS))
3311 tree decl
3312 = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
3313 gimple *g = gimple_build_call (decl, 0);
3314 gimple_set_location (g, gimple_location (stmt));
3315 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
3318 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3319 x = gimple_build_call (fn, 1, var);
3320 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3323 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3326 gsi_remove (&gsi, true);
3328 return ret;
3331 namespace {
3333 const pass_data pass_data_lower_resx =
3335 GIMPLE_PASS, /* type */
3336 "resx", /* name */
3337 OPTGROUP_NONE, /* optinfo_flags */
3338 TV_TREE_EH, /* tv_id */
3339 PROP_gimple_lcf, /* properties_required */
3340 0, /* properties_provided */
3341 0, /* properties_destroyed */
3342 0, /* todo_flags_start */
3343 0, /* todo_flags_finish */
3346 class pass_lower_resx : public gimple_opt_pass
3348 public:
3349 pass_lower_resx (gcc::context *ctxt)
3350 : gimple_opt_pass (pass_data_lower_resx, ctxt)
3353 /* opt_pass methods: */
3354 virtual bool gate (function *) { return flag_exceptions != 0; }
3355 virtual unsigned int execute (function *);
3357 }; // class pass_lower_resx
3359 unsigned
3360 pass_lower_resx::execute (function *fun)
3362 basic_block bb;
3363 bool dominance_invalidated = false;
3364 bool any_rewritten = false;
3366 hash_map<eh_region, tree> mnt_map;
3368 FOR_EACH_BB_FN (bb, fun)
3370 gimple *last = last_stmt (bb);
3371 if (last && is_gimple_resx (last))
3373 dominance_invalidated |=
3374 lower_resx (bb, as_a <gresx *> (last), &mnt_map);
3375 any_rewritten = true;
3379 if (dominance_invalidated)
3381 free_dominance_info (CDI_DOMINATORS);
3382 free_dominance_info (CDI_POST_DOMINATORS);
3385 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3388 } // anon namespace
3390 gimple_opt_pass *
3391 make_pass_lower_resx (gcc::context *ctxt)
3393 return new pass_lower_resx (ctxt);
3396 /* Try to optimize var = {v} {CLOBBER} stmts followed just by
3397 external throw. */
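/* Illustrative sketch: in a block reached by an EH edge such as

     <L5>:
       x ={v} {CLOBBER};
       y ={v} {CLOBBER};
       resx 1;   <-- the exception leaves the function

   the clobbers serve no purpose on the externally-throwing path and are
   simply deleted; the labels and the resx itself are left alone. */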
3399 static void
3400 optimize_clobbers (basic_block bb)
3402 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3403 bool any_clobbers = false;
3404 bool seen_stack_restore = false;
3405 edge_iterator ei;
3406 edge e;
3408 /* Only optimize anything if the bb contains at least one clobber,
3409 ends with resx (checked by caller), optionally contains some
3410 debug stmts or labels, or at most one __builtin_stack_restore
3411 call, and has an incoming EH edge. */
3412 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3414 gimple *stmt = gsi_stmt (gsi);
3415 if (is_gimple_debug (stmt))
3416 continue;
3417 if (gimple_clobber_p (stmt))
3419 any_clobbers = true;
3420 continue;
3422 if (!seen_stack_restore
3423 && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
3425 seen_stack_restore = true;
3426 continue;
3428 if (gimple_code (stmt) == GIMPLE_LABEL)
3429 break;
3430 return;
3432 if (!any_clobbers)
3433 return;
3434 FOR_EACH_EDGE (e, ei, bb->preds)
3435 if (e->flags & EDGE_EH)
3436 break;
3437 if (e == NULL)
3438 return;
3439 gsi = gsi_last_bb (bb);
3440 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3442 gimple *stmt = gsi_stmt (gsi);
3443 if (!gimple_clobber_p (stmt))
3444 continue;
3445 unlink_stmt_vdef (stmt);
3446 gsi_remove (&gsi, true);
3447 release_defs (stmt);
3451 /* Try to sink var = {v} {CLOBBER} stmts followed just by
3452 internal throw to successor BB. */
3454 static int
3455 sink_clobbers (basic_block bb)
3457 edge e;
3458 edge_iterator ei;
3459 gimple_stmt_iterator gsi, dgsi;
3460 basic_block succbb;
3461 bool any_clobbers = false;
3462 unsigned todo = 0;
3464 /* Only optimize if BB has a single EH successor and
3465 all predecessor edges are EH too. */
3466 if (!single_succ_p (bb)
3467 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3468 return 0;
3470 FOR_EACH_EDGE (e, ei, bb->preds)
3472 if ((e->flags & EDGE_EH) == 0)
3473 return 0;
3476 /* And BB contains only CLOBBER stmts before the final
3477 RESX. */
3478 gsi = gsi_last_bb (bb);
3479 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3481 gimple *stmt = gsi_stmt (gsi);
3482 if (is_gimple_debug (stmt))
3483 continue;
3484 if (gimple_code (stmt) == GIMPLE_LABEL)
3485 break;
3486 if (!gimple_clobber_p (stmt))
3487 return 0;
3488 any_clobbers = true;
3490 if (!any_clobbers)
3491 return 0;
3493 edge succe = single_succ_edge (bb);
3494 succbb = succe->dest;
3496 /* See if there is a virtual PHI node to take an updated virtual
3497 operand from. */
3498 gphi *vphi = NULL;
3499 tree vuse = NULL_TREE;
3500 for (gphi_iterator gpi = gsi_start_phis (succbb);
3501 !gsi_end_p (gpi); gsi_next (&gpi))
3503 tree res = gimple_phi_result (gpi.phi ());
3504 if (virtual_operand_p (res))
3506 vphi = gpi.phi ();
3507 vuse = res;
3508 break;
3512 dgsi = gsi_after_labels (succbb);
3513 gsi = gsi_last_bb (bb);
3514 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3516 gimple *stmt = gsi_stmt (gsi);
3517 tree lhs;
3518 if (is_gimple_debug (stmt))
3519 continue;
3520 if (gimple_code (stmt) == GIMPLE_LABEL)
3521 break;
3522 lhs = gimple_assign_lhs (stmt);
3523 /* Unfortunately we don't have dominance info updated at this
3524 point, so checking if
3525 dominated_by_p (CDI_DOMINATORS, succbb,
3526 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0)))
3527 would be too costly. Thus, avoid sinking any clobbers that
3528 refer to non-(D) SSA_NAMEs. */
3529 if (TREE_CODE (lhs) == MEM_REF
3530 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
3531 && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0)))
3533 unlink_stmt_vdef (stmt);
3534 gsi_remove (&gsi, true);
3535 release_defs (stmt);
3536 continue;
3539 /* As we do not change stmt order when sinking across a
3540 forwarder edge we can keep virtual operands in place. */
3541 gsi_remove (&gsi, false);
3542 gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);
3544 /* But adjust virtual operands if we sunk across a PHI node. */
3545 if (vuse)
3547 gimple *use_stmt;
3548 imm_use_iterator iter;
3549 use_operand_p use_p;
3550 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
3551 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3552 SET_USE (use_p, gimple_vdef (stmt));
3553 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
3555 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
3556 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
3558 /* Adjust the incoming virtual operand. */
3559 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
3560 SET_USE (gimple_vuse_op (stmt), vuse);
3562 /* If SUCCBB has more than one predecessor but no virtual PHI node,
3563 arrange for virtual operands to be renamed. */
3564 else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
3565 && !single_pred_p (succbb))
3567 /* In this case there will be no use of the VDEF of this stmt.
3568 ??? Unless this is a secondary opportunity and we have not
3569 removed unreachable blocks yet, so we cannot assert this.
3570 Which also means we will end up renaming too many times. */
3571 SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
3572 mark_virtual_operands_for_renaming (cfun);
3573 todo |= TODO_update_ssa_only_virtuals;
3577 return todo;
3580 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3581 we have found some duplicate labels and removed some edges. */
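/* An illustrative sketch for an ERT_TRY region with two typed handlers:

     eh_dispatch 1;   -->   _f = __builtin_eh_filter (1);
                            switch (_f) <default: <L0>,
                                         case 1: <L1>,
                                         case 2: <L2>>

   whereas a region whose only handler is a catch-all keeps the plain
   fallthru edge and no switch is emitted. */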
3583 static bool
3584 lower_eh_dispatch (basic_block src, geh_dispatch *stmt)
3586 gimple_stmt_iterator gsi;
3587 int region_nr;
3588 eh_region r;
3589 tree filter, fn;
3590 gimple *x;
3591 bool redirected = false;
3593 region_nr = gimple_eh_dispatch_region (stmt);
3594 r = get_eh_region_from_number (region_nr);
3596 gsi = gsi_last_bb (src);
3598 switch (r->type)
3600 case ERT_TRY:
3602 auto_vec<tree> labels;
3603 tree default_label = NULL;
3604 eh_catch c;
3605 edge_iterator ei;
3606 edge e;
3607 hash_set<tree> seen_values;
3609 /* Collect the labels for a switch. Zero each handler's label
3610 field because we'll no longer have anything keeping these labels
3611 in existence and the optimizer will be free to merge these
3612 blocks at will. */
3613 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3615 tree tp_node, flt_node, lab = c->label;
3616 bool have_label = false;
3618 c->label = NULL;
3619 tp_node = c->type_list;
3620 flt_node = c->filter_list;
3622 if (tp_node == NULL)
3624 default_label = lab;
3625 break;
3629 /* Filter out duplicate labels that arise when this handler
3630 is shadowed by an earlier one. When no labels are
3631 attached to the handler anymore, we remove
3632 the corresponding edge and then we delete unreachable
3633 blocks at the end of this pass. */
3634 if (! seen_values.contains (TREE_VALUE (flt_node)))
3636 tree t = build_case_label (TREE_VALUE (flt_node),
3637 NULL, lab);
3638 labels.safe_push (t);
3639 seen_values.add (TREE_VALUE (flt_node));
3640 have_label = true;
3643 tp_node = TREE_CHAIN (tp_node);
3644 flt_node = TREE_CHAIN (flt_node);
3646 while (tp_node);
3647 if (! have_label)
3649 remove_edge (find_edge (src, label_to_block (lab)));
3650 redirected = true;
3654 /* Clean up the edge flags. */
3655 FOR_EACH_EDGE (e, ei, src->succs)
3657 if (e->flags & EDGE_FALLTHRU)
3659 /* If there was no catch-all, use the fallthru edge. */
3660 if (default_label == NULL)
3661 default_label = gimple_block_label (e->dest);
3662 e->flags &= ~EDGE_FALLTHRU;
3665 gcc_assert (default_label != NULL);
3667 /* Don't generate a switch if there's only a default case.
3668 This is common in the form of try { A; } catch (...) { B; }. */
3669 if (!labels.exists ())
3671 e = single_succ_edge (src);
3672 e->flags |= EDGE_FALLTHRU;
3674 else
3676 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3677 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3678 region_nr));
3679 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
3680 filter = make_ssa_name (filter, x);
3681 gimple_call_set_lhs (x, filter);
3682 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3684 /* Turn the default label into a default case. */
3685 default_label = build_case_label (NULL, NULL, default_label);
3686 sort_case_labels (labels);
3688 x = gimple_build_switch (filter, default_label, labels);
3689 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3692 break;
3694 case ERT_ALLOWED_EXCEPTIONS:
3696 edge b_e = BRANCH_EDGE (src);
3697 edge f_e = FALLTHRU_EDGE (src);
3699 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3700 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3701 region_nr));
3702 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
3703 filter = make_ssa_name (filter, x);
3704 gimple_call_set_lhs (x, filter);
3705 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3707 r->u.allowed.label = NULL;
3708 x = gimple_build_cond (EQ_EXPR, filter,
3709 build_int_cst (TREE_TYPE (filter),
3710 r->u.allowed.filter),
3711 NULL_TREE, NULL_TREE);
3712 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3714 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3715 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3717 break;
3719 default:
3720 gcc_unreachable ();
3723 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3724 gsi_remove (&gsi, true);
3725 return redirected;
3728 namespace {
3730 const pass_data pass_data_lower_eh_dispatch =
3732 GIMPLE_PASS, /* type */
3733 "ehdisp", /* name */
3734 OPTGROUP_NONE, /* optinfo_flags */
3735 TV_TREE_EH, /* tv_id */
3736 PROP_gimple_lcf, /* properties_required */
3737 0, /* properties_provided */
3738 0, /* properties_destroyed */
3739 0, /* todo_flags_start */
3740 0, /* todo_flags_finish */
3743 class pass_lower_eh_dispatch : public gimple_opt_pass
3745 public:
3746 pass_lower_eh_dispatch (gcc::context *ctxt)
3747 : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt)
3750 /* opt_pass methods: */
3751 virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; }
3752 virtual unsigned int execute (function *);
3754 }; // class pass_lower_eh_dispatch
3756 unsigned
3757 pass_lower_eh_dispatch::execute (function *fun)
3759 basic_block bb;
3760 int flags = 0;
3761 bool redirected = false;
3763 assign_filter_values ();
3765 FOR_EACH_BB_FN (bb, fun)
3767 gimple *last = last_stmt (bb);
3768 if (last == NULL)
3769 continue;
3770 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
3772 redirected |= lower_eh_dispatch (bb,
3773 as_a <geh_dispatch *> (last));
3774 flags |= TODO_update_ssa_only_virtuals;
3776 else if (gimple_code (last) == GIMPLE_RESX)
3778 if (stmt_can_throw_external (last))
3779 optimize_clobbers (bb);
3780 else
3781 flags |= sink_clobbers (bb);
3785 if (redirected)
3786 delete_unreachable_blocks ();
3787 return flags;
3790 } // anon namespace
3792 gimple_opt_pass *
3793 make_pass_lower_eh_dispatch (gcc::context *ctxt)
3795 return new pass_lower_eh_dispatch (ctxt);
3798 /* Walk statements, see what regions and, optionally, landing pads
3799 are really referenced.
3801 Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
3802 and in LP_REACHABLEP an sbitmap with bits set for reachable landing pads.
3804 Passing NULL for LP_REACHABLEP is valid; in that case only reachable
3805 regions are marked.
3807 The caller is responsible for freeing the returned sbitmaps. */
3809 static void
3810 mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
3812 sbitmap r_reachable, lp_reachable;
3813 basic_block bb;
3814 bool mark_landing_pads = (lp_reachablep != NULL);
3815 gcc_checking_assert (r_reachablep != NULL);
3817 r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
3818 bitmap_clear (r_reachable);
3819 *r_reachablep = r_reachable;
3821 if (mark_landing_pads)
3823 lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
3824 bitmap_clear (lp_reachable);
3825 *lp_reachablep = lp_reachable;
3827 else
3828 lp_reachable = NULL;
3830 FOR_EACH_BB_FN (bb, cfun)
3832 gimple_stmt_iterator gsi;
3834 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3836 gimple *stmt = gsi_stmt (gsi);
3838 if (mark_landing_pads)
3840 int lp_nr = lookup_stmt_eh_lp (stmt);
3842 /* Negative LP numbers are MUST_NOT_THROW regions which
3843 are not considered BB enders. */
3844 if (lp_nr < 0)
3845 bitmap_set_bit (r_reachable, -lp_nr);
3847 /* Positive LP numbers are real landing pads, and BB enders. */
3848 else if (lp_nr > 0)
3850 gcc_assert (gsi_one_before_end_p (gsi));
3851 eh_region region = get_eh_region_from_lp_number (lp_nr);
3852 bitmap_set_bit (r_reachable, region->index);
3853 bitmap_set_bit (lp_reachable, lp_nr);
3857 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3858 switch (gimple_code (stmt))
3860 case GIMPLE_RESX:
3861 bitmap_set_bit (r_reachable,
3862 gimple_resx_region (as_a <gresx *> (stmt)));
3863 break;
3864 case GIMPLE_EH_DISPATCH:
3865 bitmap_set_bit (r_reachable,
3866 gimple_eh_dispatch_region (
3867 as_a <geh_dispatch *> (stmt)));
3868 break;
3869 case GIMPLE_CALL:
3870 if (gimple_call_builtin_p (stmt, BUILT_IN_EH_COPY_VALUES))
3871 for (int i = 0; i < 2; ++i)
3873 tree rt = gimple_call_arg (stmt, i);
3874 HOST_WIDE_INT ri = tree_to_shwi (rt);
3876 gcc_assert (ri == (int)ri);
3877 bitmap_set_bit (r_reachable, ri);
3879 break;
3880 default:
3881 break;
3887 /* Remove unreachable handlers and unreachable landing pads. */
3889 static void
3890 remove_unreachable_handlers (void)
3892 sbitmap r_reachable, lp_reachable;
3893 eh_region region;
3894 eh_landing_pad lp;
3895 unsigned i;
3897 mark_reachable_handlers (&r_reachable, &lp_reachable);
3899 if (dump_file)
3901 fprintf (dump_file, "Before removal of unreachable regions:\n");
3902 dump_eh_tree (dump_file, cfun);
3903 fprintf (dump_file, "Reachable regions: ");
3904 dump_bitmap_file (dump_file, r_reachable);
3905 fprintf (dump_file, "Reachable landing pads: ");
3906 dump_bitmap_file (dump_file, lp_reachable);
3909 if (dump_file)
3911 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3912 if (region && !bitmap_bit_p (r_reachable, region->index))
3913 fprintf (dump_file,
3914 "Removing unreachable region %d\n",
3915 region->index);
3918 remove_unreachable_eh_regions (r_reachable);
3920 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3921 if (lp && !bitmap_bit_p (lp_reachable, lp->index))
3923 if (dump_file)
3924 fprintf (dump_file,
3925 "Removing unreachable landing pad %d\n",
3926 lp->index);
3927 remove_eh_landing_pad (lp);
3930 if (dump_file)
3932 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3933 dump_eh_tree (dump_file, cfun);
3934 fprintf (dump_file, "\n\n");
3937 sbitmap_free (r_reachable);
3938 sbitmap_free (lp_reachable);
3940 if (flag_checking)
3941 verify_eh_tree (cfun);
3944 /* Remove unreachable handlers if any landing pads have been removed after
3945 the last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
3947 void
3948 maybe_remove_unreachable_handlers (void)
3950 eh_landing_pad lp;
3951 unsigned i;
3953 if (cfun->eh == NULL)
3954 return;
3956 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3957 if (lp && lp->post_landing_pad)
3959 if (label_to_block (lp->post_landing_pad) == NULL)
3961 remove_unreachable_handlers ();
3962 return;
3967 /* Remove regions that do not have landing pads. This assumes
3968 that remove_unreachable_handlers has already been run, and
3969 that we've just manipulated the landing pads since then.
3971 Preserve regions with landing pads and regions that prevent
3972 exceptions from propagating further, even if these regions
3973 are not reachable. */
3975 static void
3976 remove_unreachable_handlers_no_lp (void)
3978 eh_region region;
3979 sbitmap r_reachable;
3980 unsigned i;
3982 mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);
3984 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3986 if (! region)
3987 continue;
3989 if (region->landing_pads != NULL
3990 || region->type == ERT_MUST_NOT_THROW)
3991 bitmap_set_bit (r_reachable, region->index);
3993 if (dump_file
3994 && !bitmap_bit_p (r_reachable, region->index))
3995 fprintf (dump_file,
3996 "Removing unreachable region %d\n",
3997 region->index);
4000 remove_unreachable_eh_regions (r_reachable);
4002 sbitmap_free (r_reachable);
4005 /* Undo critical edge splitting on an EH landing pad. Earlier, we
4006 optimistically split all sorts of edges, including EH edges. The
4007 optimization passes in between may not have needed them; if not,
4008 we should undo the split.
4010 Recognize this case by having one EH edge incoming to the BB and
4011 one normal edge outgoing; BB should be empty apart from the
4012 post_landing_pad label.
4014 Note that this is slightly different from the empty handler case
4015 handled by cleanup_empty_eh, in that the actual handler may still
4016 contain code but the landing pad has been separated from the
4017 handler. As such, cleanup_empty_eh relies on this transformation
4018 having been done first. */
4020 static bool
4021 unsplit_eh (eh_landing_pad lp)
4023 basic_block bb = label_to_block (lp->post_landing_pad);
4024 gimple_stmt_iterator gsi;
4025 edge e_in, e_out;
4027 /* Quickly check the edge counts on BB for singularity. */
4028 if (!single_pred_p (bb) || !single_succ_p (bb))
4029 return false;
4030 e_in = single_pred_edge (bb);
4031 e_out = single_succ_edge (bb);
4033 /* Input edge must be EH and output edge must be normal. */
4034 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
4035 return false;
4037 /* The block must be empty except for the labels and debug insns. */
4038 gsi = gsi_after_labels (bb);
4039 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4040 gsi_next_nondebug (&gsi);
4041 if (!gsi_end_p (gsi))
4042 return false;
4044 /* The destination block must not already have a landing pad
4045 for a different region. */
4046 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4048 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
4049 tree lab;
4050 int lp_nr;
4052 if (!label_stmt)
4053 break;
4054 lab = gimple_label_label (label_stmt);
4055 lp_nr = EH_LANDING_PAD_NR (lab);
4056 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4057 return false;
4060 /* The new destination block must not already be a destination of
4061 the source block, lest we merge fallthru and EH edges and get
4062 all sorts of confusion. */
4063 if (find_edge (e_in->src, e_out->dest))
4064 return false;
4066 /* ??? We can get degenerate phis due to cfg cleanups. I would have
4067 thought this should have been cleaned up by a phicprop pass, but
4068 that doesn't appear to handle virtuals. Propagate by hand. */
4069 if (!gimple_seq_empty_p (phi_nodes (bb)))
4071 for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
4073 gimple *use_stmt;
4074 gphi *phi = gpi.phi ();
4075 tree lhs = gimple_phi_result (phi);
4076 tree rhs = gimple_phi_arg_def (phi, 0);
4077 use_operand_p use_p;
4078 imm_use_iterator iter;
4080 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
4082 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
4083 SET_USE (use_p, rhs);
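/* Make the replacement value inherit the abnormal-PHI marker from the
   result it replaces. */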
4086 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4087 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
4089 remove_phi_node (&gpi, true);
4093 if (dump_file && (dump_flags & TDF_DETAILS))
4094 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
4095 lp->index, e_out->dest->index);
4097 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
4098 a successor edge, humor it. But do the real CFG change with the
4099 predecessor of E_OUT in order to preserve the ordering of arguments
4100 to the PHI nodes in E_OUT->DEST. */
4101 redirect_eh_edge_1 (e_in, e_out->dest, false);
4102 redirect_edge_pred (e_out, e_in->src);
4103 e_out->flags = e_in->flags;
4104 e_out->probability = e_in->probability;
4105 e_out->count = e_in->count;
4106 remove_edge (e_in);
4108 return true;
4111 /* Examine each landing pad block and see if it matches unsplit_eh. */
4113 static bool
4114 unsplit_all_eh (void)
4116 bool changed = false;
4117 eh_landing_pad lp;
4118 int i;
4120 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
4121 if (lp)
4122 changed |= unsplit_eh (lp);
4124 return changed;
4127 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
4128 to OLD_BB to NEW_BB; return true on success, false on failure.
4130 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
4131 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
4132 Virtual PHIs may be deleted and marked for renaming. */
4134 static bool
4135 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
4136 edge old_bb_out, bool change_region)
4138 gphi_iterator ngsi, ogsi;
4139 edge_iterator ei;
4140 edge e;
4141 bitmap ophi_handled;
4143 /* The destination block must not be a regular successor for any
4144 of the preds of the landing pad. Thus, avoid turning
4145 <..>
4146 | \ EH
4147 | <..>
4149 <..>
4150 into
4151 <..>
4152 | | EH
4153 <..>
4154 which CFG verification would choke on. See PR45172 and PR51089. */
4155 FOR_EACH_EDGE (e, ei, old_bb->preds)
4156 if (find_edge (e->src, new_bb))
4157 return false;
4159 FOR_EACH_EDGE (e, ei, old_bb->preds)
4160 redirect_edge_var_map_clear (e);
4162 ophi_handled = BITMAP_ALLOC (NULL);
4164 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
4165 for the edges we're going to move. */
4166 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
4168 gphi *ophi, *nphi = ngsi.phi ();
4169 tree nresult, nop;
4171 nresult = gimple_phi_result (nphi);
4172 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
4174 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
4175 the source ssa_name. */
4176 ophi = NULL;
4177 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4179 ophi = ogsi.phi ();
4180 if (gimple_phi_result (ophi) == nop)
4181 break;
4182 ophi = NULL;
4185 /* If we did find the corresponding PHI, copy those inputs. */
4186 if (ophi)
4188 /* If NOP is used anywhere other than in PHIs in NEW_BB, give up. */
4189 if (!has_single_use (nop))
4191 imm_use_iterator imm_iter;
4192 use_operand_p use_p;
4194 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
4196 if (!gimple_debug_bind_p (USE_STMT (use_p))
4197 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
4198 || gimple_bb (USE_STMT (use_p)) != new_bb))
4199 goto fail;
4202 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
4203 FOR_EACH_EDGE (e, ei, old_bb->preds)
4205 location_t oloc;
4206 tree oop;
4208 if ((e->flags & EDGE_EH) == 0)
4209 continue;
4210 oop = gimple_phi_arg_def (ophi, e->dest_idx);
4211 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
4212 redirect_edge_var_map_add (e, nresult, oop, oloc);
4215 /* If we didn't find the PHI, then whether it's a real variable or a VOP,
4216 we know from the fact that OLD_BB is tree_empty_eh_handler_p that the
4217 variable is unchanged from input to the block, and we can simply
4218 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
4219 else
4221 location_t nloc
4222 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
4223 FOR_EACH_EDGE (e, ei, old_bb->preds)
4224 redirect_edge_var_map_add (e, nresult, nop, nloc);
4228 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
4229 we don't know what values from the other edges into NEW_BB to use. */
4230 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4232 gphi *ophi = ogsi.phi ();
4233 tree oresult = gimple_phi_result (ophi);
4234 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
4235 goto fail;
4238 /* Finally, move the edges and update the PHIs. */
4239 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
4240 if (e->flags & EDGE_EH)
4242 /* ??? CFG manipulation routines do not try to update loop
4243 form on edge redirection. Do so manually here for now. */
4244 /* If we redirect a loop entry or latch edge, that will either create
4245 a multiple-entry loop or rotate the loop. If the loops merge,
4246 we may have created a loop with multiple latches.
4247 None of this is easily fixed, so cancel the affected loop
4248 and mark the other loop as possibly having multiple latches. */
4249 if (e->dest == e->dest->loop_father->header)
4251 mark_loop_for_removal (e->dest->loop_father);
4252 new_bb->loop_father->latch = NULL;
4253 loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
4255 redirect_eh_edge_1 (e, new_bb, change_region);
4256 redirect_edge_succ (e, new_bb);
4257 flush_pending_stmts (e);
4259 else
4260 ei_next (&ei);
4262 BITMAP_FREE (ophi_handled);
4263 return true;
4265 fail:
4266 FOR_EACH_EDGE (e, ei, old_bb->preds)
4267 redirect_edge_var_map_clear (e);
4268 BITMAP_FREE (ophi_handled);
4269 return false;
4272 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
4273 old region to NEW_REGION at BB. */
4275 static void
4276 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
4277 eh_landing_pad lp, eh_region new_region)
4279 gimple_stmt_iterator gsi;
4280 eh_landing_pad *pp;
4282 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
4283 continue;
4284 *pp = lp->next_lp;
4286 lp->region = new_region;
4287 lp->next_lp = new_region->landing_pads;
4288 new_region->landing_pads = lp;
4290 /* Delete the RESX that was matched within the empty handler block. */
4291 gsi = gsi_last_bb (bb);
4292 unlink_stmt_vdef (gsi_stmt (gsi));
4293 gsi_remove (&gsi, true);
4295 /* Clean up E_OUT for the fallthru. */
4296 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
4297 e_out->probability = profile_probability::always ();
4298 e_out->count = e_out->src->count;
4301 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
4302 unsplitting than unsplit_eh was prepared to handle, e.g. when
4303 multiple incoming edges and phis are involved. */
4305 static bool
4306 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
4308 gimple_stmt_iterator gsi;
4309 tree lab;
4311 /* We really ought not have totally lost everything following
4312 a landing pad label. Given that BB is empty, there had better
4313 be a successor. */
4314 gcc_assert (e_out != NULL);
4316 /* The destination block must not already have a landing pad
4317 for a different region. */
4318 lab = NULL;
4319 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4321 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
4322 int lp_nr;
4324 if (!stmt)
4325 break;
4326 lab = gimple_label_label (stmt);
4327 lp_nr = EH_LANDING_PAD_NR (lab);
4328 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4329 return false;
4332 /* Attempt to move the PHIs into the successor block. */
4333 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
4335 if (dump_file && (dump_flags & TDF_DETAILS))
4336 fprintf (dump_file,
4337 "Unsplit EH landing pad %d to block %i "
4338 "(via cleanup_empty_eh).\n",
4339 lp->index, e_out->dest->index);
4340 return true;
4343 return false;
4346 /* Return true if edge E_FIRST is part of an empty infinite loop
4347 or leads to such a loop through a series of single successor
4348 empty bbs. */
4350 static bool
4351 infinite_empty_loop_p (edge e_first)
4353 bool inf_loop = false;
4354 edge e;
4356 if (e_first->dest == e_first->src)
4357 return true;
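/* Otherwise walk the chain of empty single-successor blocks, marking each
   visited block via its AUX field so that a cycle can be detected. */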
4359 e_first->src->aux = (void *) 1;
4360 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4362 gimple_stmt_iterator gsi;
4363 if (e->dest->aux)
4365 inf_loop = true;
4366 break;
4368 e->dest->aux = (void *) 1;
4369 gsi = gsi_after_labels (e->dest);
4370 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4371 gsi_next_nondebug (&gsi);
4372 if (!gsi_end_p (gsi))
4373 break;
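/* Clear the visited markers again before returning. */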
4375 e_first->src->aux = NULL;
4376 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4377 e->dest->aux = NULL;
4379 return inf_loop;
4382 /* Examine the block associated with LP to determine if it's an empty
4383 handler for its EH region. If so, attempt to redirect EH edges to
4384 an outer region. Return true if the CFG was updated in any way. This
4385 is similar to jump forwarding, just across EH edges. */
4387 static bool
4388 cleanup_empty_eh (eh_landing_pad lp)
4390 basic_block bb = label_to_block (lp->post_landing_pad);
4391 gimple_stmt_iterator gsi;
4392 gimple *resx;
4393 eh_region new_region;
4394 edge_iterator ei;
4395 edge e, e_out;
4396 bool has_non_eh_pred;
4397 bool ret = false;
4398 int new_lp_nr;
4400 /* There can be zero or one edges out of BB. This is the quickest test. */
4401 switch (EDGE_COUNT (bb->succs))
4403 case 0:
4404 e_out = NULL;
4405 break;
4406 case 1:
4407 e_out = single_succ_edge (bb);
4408 break;
4409 default:
4410 return false;
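/* If the block ends in a RESX, handle any clobber statements before it
   first: they are removed when the RESX throws externally, and otherwise
   an attempt is made to sink them into the outer handler. */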
4413 gsi = gsi_last_nondebug_bb (bb);
4414 resx = gsi_stmt (gsi);
4415 if (resx && is_gimple_resx (resx))
4417 if (stmt_can_throw_external (resx))
4418 optimize_clobbers (bb);
4419 else if (sink_clobbers (bb))
4420 ret = true;
4423 gsi = gsi_after_labels (bb);
4425 /* Make sure to skip debug statements. */
4426 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4427 gsi_next_nondebug (&gsi);
4429 /* If the block is totally empty, look for more unsplitting cases. */
4430 if (gsi_end_p (gsi))
4432 /* For the degenerate case of an infinite loop, bail out.
4433 If BB has no successors and is totally empty, which can happen e.g.
4434 because of an incorrect noreturn attribute, bail out too. */
4435 if (e_out == NULL
4436 || infinite_empty_loop_p (e_out))
4437 return ret;
4439 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
4442 /* The block should consist only of a single RESX statement, modulo a
4443 preceding call to __builtin_stack_restore if there is no outgoing
4444 edge, since the call can be eliminated in this case. */
4445 resx = gsi_stmt (gsi);
4446 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4448 gsi_next_nondebug (&gsi);
4449 resx = gsi_stmt (gsi);
4451 if (!is_gimple_resx (resx))
4452 return ret;
4453 gcc_assert (gsi_one_nondebug_before_end_p (gsi));
4455 /* Determine if there are non-EH edges, or resx edges into the handler. */
4456 has_non_eh_pred = false;
4457 FOR_EACH_EDGE (e, ei, bb->preds)
4458 if (!(e->flags & EDGE_EH))
4459 has_non_eh_pred = true;
4461 /* Find the handler that encloses the empty handler by looking at
4462 where the RESX instruction was vectored. */
4463 new_lp_nr = lookup_stmt_eh_lp (resx);
4464 new_region = get_eh_region_from_lp_number (new_lp_nr);
4466 /* If there's no destination region within the current function,
4467 redirection is trivial: remove the throwing statements from
4468 the EH region, remove the EH edges, and allow the block
4469 to go unreachable. */
4470 if (new_region == NULL)
4472 gcc_assert (e_out == NULL);
4473 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4474 if (e->flags & EDGE_EH)
4476 gimple *stmt = last_stmt (e->src);
4477 remove_stmt_from_eh_lp (stmt);
4478 remove_edge (e);
4480 else
4481 ei_next (&ei);
4482 goto succeed;
4485 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4486 to handle the abort and allow the blocks to go unreachable. */
4487 if (new_region->type == ERT_MUST_NOT_THROW)
4489 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4490 if (e->flags & EDGE_EH)
4492 gimple *stmt = last_stmt (e->src);
4493 remove_stmt_from_eh_lp (stmt);
4494 add_stmt_to_eh_lp (stmt, new_lp_nr);
4495 remove_edge (e);
4497 else
4498 ei_next (&ei);
4499 goto succeed;
4502 /* Try to redirect the EH edges and merge the PHIs into the destination
4503 landing pad block. If the merge succeeds, we'll already have redirected
4504 all the EH edges. The handler itself will go unreachable if there were
4505 no normal edges. */
4506 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
4507 goto succeed;
4509 /* Finally, if all input edges are EH edges, then we can (potentially)
4510 reduce the number of transfers from the runtime by moving the landing
4511 pad from the original region to the new region. This is a win when
4512 we remove the last CLEANUP region along a particular exception
4513 propagation path. Since nothing changes except for the region with
4514 which the landing pad is associated, the PHI nodes do not need to be
4515 adjusted at all. */
4516 if (!has_non_eh_pred)
4518 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4519 if (dump_file && (dump_flags & TDF_DETAILS))
4520 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4521 lp->index, new_region->index);
4523 /* ??? The CFG didn't change, but we may have rendered the
4524 old EH region unreachable. Trigger a cleanup there. */
4525 return true;
4528 return ret;
4530 succeed:
4531 if (dump_file && (dump_flags & TDF_DETAILS))
4532 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4533 remove_eh_landing_pad (lp);
4534 return true;
4537 /* Do a post-order traversal of the EH region tree. Examine each
4538 post_landing_pad block and see if we can eliminate it as empty. */
4540 static bool
4541 cleanup_all_empty_eh (void)
4543 bool changed = false;
4544 eh_landing_pad lp;
4545 int i;
4547 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
4548 if (lp)
4549 changed |= cleanup_empty_eh (lp);
4551 return changed;
4554 /* Perform cleanups and lowering of exception handling:
4555 1) cleanup regions with handlers doing nothing are optimized out
4556 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
4557 3) info about regions containing instructions, and regions
4558 reachable via local EH edges, is collected
4559 4) the EH tree is pruned for regions that are no longer necessary.
4561 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4562 Unify those that have the same failure decl and locus. */
4565 static unsigned int
4566 execute_cleanup_eh_1 (void)
4568 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4569 looking up unreachable landing pads. */
4570 remove_unreachable_handlers ();
4572 /* Watch out for the region tree vanishing due to all regions becoming unreachable. */
4573 if (cfun->eh->region_tree)
4575 bool changed = false;
4577 if (optimize)
4578 changed |= unsplit_all_eh ();
4579 changed |= cleanup_all_empty_eh ();
4581 if (changed)
4583 free_dominance_info (CDI_DOMINATORS);
4584 free_dominance_info (CDI_POST_DOMINATORS);
4586 /* We delayed all basic block deletion, as we may have performed
4587 cleanups on EH edges while non-EH edges were still present. */
4588 delete_unreachable_blocks ();
4590 /* We manipulated the landing pads. Remove any region that no
4591 longer has a landing pad. */
4592 remove_unreachable_handlers_no_lp ();
4594 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4598 return 0;
4601 namespace {
4603 const pass_data pass_data_cleanup_eh =
4605 GIMPLE_PASS, /* type */
4606 "ehcleanup", /* name */
4607 OPTGROUP_NONE, /* optinfo_flags */
4608 TV_TREE_EH, /* tv_id */
4609 PROP_gimple_lcf, /* properties_required */
4610 0, /* properties_provided */
4611 0, /* properties_destroyed */
4612 0, /* todo_flags_start */
4613 0, /* todo_flags_finish */
4616 class pass_cleanup_eh : public gimple_opt_pass
4618 public:
4619 pass_cleanup_eh (gcc::context *ctxt)
4620 : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
4623 /* opt_pass methods: */
4624 opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }
4625 virtual bool gate (function *fun)
4627 return fun->eh != NULL && fun->eh->region_tree != NULL;
4630 virtual unsigned int execute (function *);
4632 }; // class pass_cleanup_eh
4634 unsigned int
4635 pass_cleanup_eh::execute (function *fun)
4637 int ret = execute_cleanup_eh_1 ();
4639 /* If the function no longer needs an EH personality routine,
4640 clear it. This exposes cross-language inlining opportunities
4641 and avoids references to a never defined personality routine. */
4642 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4643 && function_needs_eh_personality (fun) != eh_personality_lang)
4644 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4646 return ret;
4649 } // anon namespace
4651 gimple_opt_pass *
4652 make_pass_cleanup_eh (gcc::context *ctxt)
4654 return new pass_cleanup_eh (ctxt);
4657 /* Verify that BB, which contains STMT as its last statement, has precisely
4658 the edge that make_eh_edges would create. */
4660 DEBUG_FUNCTION bool
4661 verify_eh_edges (gimple *stmt)
4663 basic_block bb = gimple_bb (stmt);
4664 eh_landing_pad lp = NULL;
4665 int lp_nr;
4666 edge_iterator ei;
4667 edge e, eh_edge;
4669 lp_nr = lookup_stmt_eh_lp (stmt);
4670 if (lp_nr > 0)
4671 lp = get_eh_landing_pad_from_number (lp_nr);
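/* Scan the successors of BB: there must be at most one EH edge, and its
   presence and destination must agree with the landing pad recorded for
   STMT. */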
4673 eh_edge = NULL;
4674 FOR_EACH_EDGE (e, ei, bb->succs)
4676 if (e->flags & EDGE_EH)
4678 if (eh_edge)
4680 error ("BB %i has multiple EH edges", bb->index);
4681 return true;
4683 else
4684 eh_edge = e;
4688 if (lp == NULL)
4690 if (eh_edge)
4692 error ("BB %i can not throw but has an EH edge", bb->index);
4693 return true;
4695 return false;
4698 if (!stmt_could_throw_p (stmt))
4700 error ("BB %i last statement has incorrectly set lp", bb->index);
4701 return true;
4704 if (eh_edge == NULL)
4706 error ("BB %i is missing an EH edge", bb->index);
4707 return true;
4710 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4712 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4713 return true;
4716 return false;
4719 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
4721 DEBUG_FUNCTION bool
4722 verify_eh_dispatch_edge (geh_dispatch *stmt)
4724 eh_region r;
4725 eh_catch c;
4726 basic_block src, dst;
4727 bool want_fallthru = true;
4728 edge_iterator ei;
4729 edge e, fall_edge;
4731 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4732 src = gimple_bb (stmt);
4734 FOR_EACH_EDGE (e, ei, src->succs)
4735 gcc_assert (e->aux == NULL);
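/* Mark each edge that leads to one of the dispatch destinations by setting
   its AUX field; the later pass over the successors checks that every edge
   was either marked here or is the single fallthru edge. */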
4737 switch (r->type)
4739 case ERT_TRY:
4740 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4742 dst = label_to_block (c->label);
4743 e = find_edge (src, dst);
4744 if (e == NULL)
4746 error ("BB %i is missing an edge", src->index);
4747 return true;
4749 e->aux = (void *)e;
4751 /* A catch-all handler doesn't have a fallthru. */
4752 if (c->type_list == NULL)
4754 want_fallthru = false;
4755 break;
4758 break;
4760 case ERT_ALLOWED_EXCEPTIONS:
4761 dst = label_to_block (r->u.allowed.label);
4762 e = find_edge (src, dst);
4763 if (e == NULL)
4765 error ("BB %i is missing an edge", src->index);
4766 return true;
4768 e->aux = (void *)e;
4769 break;
4771 default:
4772 gcc_unreachable ();
4775 fall_edge = NULL;
4776 FOR_EACH_EDGE (e, ei, src->succs)
4778 if (e->flags & EDGE_FALLTHRU)
4780 if (fall_edge != NULL)
4782 error ("BB %i too many fallthru edges", src->index);
4783 return true;
4785 fall_edge = e;
4787 else if (e->aux)
4788 e->aux = NULL;
4789 else
4791 error ("BB %i has incorrect edge", src->index);
4792 return true;
4795 if ((fall_edge != NULL) ^ want_fallthru)
4797 error ("BB %i has incorrect fallthru edge", src->index);
4798 return true;
4801 return false;