gcc/tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "errors.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
52 /* Local declarations. */
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
57 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
58 which use a particular edge. The CASE_LABEL_EXPRs are chained together
59 via their TREE_CHAIN field, which we clear after we're done with the
60 hash table to prevent problems with duplication of SWITCH_EXPRs.
62 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
63 update the case vector in response to edge redirections.
65 Right now this table is set up and torn down at key points in the
66 compilation process. It would be nice if we could make the table
67 more persistent. The key is getting notification of changes to
68 the CFG (particularly edge removal, creation and redirection). */
70 struct edge_to_cases_elt
72 /* The edge itself. Necessary for hashing and equality tests. */
73 edge e;
75 /* The case labels associated with this edge. We link these up via
76 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
77 when we destroy the hash table. This prevents problems when copying
78 SWITCH_EXPRs. */
79 tree case_labels;
82 static htab_t edge_to_cases;
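/* Illustrative sketch, not part of the original file: the intended usage
   pattern for the mapping above.  A caller brackets a batch of edge
   redirections with the recording routines defined later in this file, so
   that get_cases_for_edge can hand back the chain of CASE_LABEL_EXPRs for
   an edge instead of rescanning the whole SWITCH_LABELS vector each time.  */
#if 0
  start_recording_case_labels ();
  /* ... redirect edges out of SWITCH_EXPRs; the redirection code asks
     get_cases_for_edge (e, switch_stmt) for the labels to retarget ...  */
  end_recording_case_labels ();
#endif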
84 /* CFG statistics. */
85 struct cfg_stats_d
87 long num_merged_labels;
90 static struct cfg_stats_d cfg_stats;
92 /* Nonzero if we found a computed goto while building basic blocks. */
93 static bool found_computed_goto;
95 /* Basic blocks and flowgraphs. */
96 static basic_block create_bb (void *, void *, basic_block);
97 static void create_block_annotation (basic_block);
98 static void free_blocks_annotations (void);
99 static void clear_blocks_annotations (void);
100 static void make_blocks (tree);
101 static void factor_computed_gotos (void);
103 /* Edges. */
104 static void make_edges (void);
105 static void make_ctrl_stmt_edges (basic_block);
106 static void make_exit_edges (basic_block);
107 static void make_cond_expr_edges (basic_block);
108 static void make_switch_expr_edges (basic_block);
109 static void make_goto_expr_edges (basic_block);
110 static edge tree_redirect_edge_and_branch (edge, basic_block);
111 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
112 static void split_critical_edges (void);
113 static bool remove_fallthru_edge (VEC(edge) *);
115 /* Various helpers. */
116 static inline bool stmt_starts_bb_p (tree, tree);
117 static int tree_verify_flow_info (void);
118 static void tree_make_forwarder_block (edge);
119 static bool tree_forwarder_block_p (basic_block, bool);
120 static void tree_cfg2vcg (FILE *);
122 /* Flowgraph optimization and cleanup. */
123 static void tree_merge_blocks (basic_block, basic_block);
124 static bool tree_can_merge_blocks_p (basic_block, basic_block);
125 static void remove_bb (basic_block);
126 static bool cleanup_control_flow (void);
127 static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
128 static edge find_taken_edge_computed_goto (basic_block, tree);
129 static edge find_taken_edge_cond_expr (basic_block, tree);
130 static edge find_taken_edge_switch_expr (basic_block, tree);
131 static tree find_case_label_for_value (tree, tree);
132 static bool phi_alternatives_equal (basic_block, edge, edge);
133 static bool cleanup_forwarder_blocks (void);
136 /*---------------------------------------------------------------------------
137 Create basic blocks
138 ---------------------------------------------------------------------------*/
140 /* Entry point to the CFG builder for trees. TP points to the list of
141 statements to be added to the flowgraph. */
143 static void
144 build_tree_cfg (tree *tp)
146 /* Register specific tree functions. */
147 tree_register_cfg_hooks ();
149 /* Initialize the basic block array. */
150 init_flow ();
151 profile_status = PROFILE_ABSENT;
152 n_basic_blocks = 0;
153 last_basic_block = 0;
154 VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
155 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
157 /* Build a mapping of labels to their associated blocks. */
158 VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
159 "label to block map");
161 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
162 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
164 found_computed_goto = 0;
165 make_blocks (*tp);
167 /* Computed gotos are hell to deal with, especially if there are
168 lots of them with a large number of destinations. So we factor
169 them to a common computed goto location before we build the
170 edge list. After we convert back to normal form, we will un-factor
171 the computed gotos since factoring introduces an unwanted jump. */
172 if (found_computed_goto)
173 factor_computed_gotos ();
175 /* Make sure there is always at least one block, even if it's empty. */
176 if (n_basic_blocks == 0)
177 create_empty_bb (ENTRY_BLOCK_PTR);
179 create_block_annotation (ENTRY_BLOCK_PTR);
180 create_block_annotation (EXIT_BLOCK_PTR);
182 /* Adjust the size of the array. */
183 VARRAY_GROW (basic_block_info, n_basic_blocks);
185 /* To speed up statement iterator walks, we first purge dead labels. */
186 cleanup_dead_labels ();
188 /* Group case nodes to reduce the number of edges.
189 We do this after cleaning up dead labels because otherwise we miss
190 a lot of obvious case merging opportunities. */
191 group_case_labels ();
193 /* Create the edges of the flowgraph. */
194 make_edges ();
196 /* Debugging dumps. */
198 /* Write the flowgraph to a VCG file. */
200 int local_dump_flags;
201 FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
202 if (dump_file)
204 tree_cfg2vcg (dump_file);
205 dump_end (TDI_vcg, dump_file);
209 /* Dump a textual representation of the flowgraph. */
210 if (dump_file)
211 dump_tree_cfg (dump_file, dump_flags);
214 static void
215 execute_build_cfg (void)
217 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
220 struct tree_opt_pass pass_build_cfg =
222 "cfg", /* name */
223 NULL, /* gate */
224 execute_build_cfg, /* execute */
225 NULL, /* sub */
226 NULL, /* next */
227 0, /* static_pass_number */
228 TV_TREE_CFG, /* tv_id */
229 PROP_gimple_leh, /* properties_required */
230 PROP_cfg, /* properties_provided */
231 0, /* properties_destroyed */
232 0, /* todo_flags_start */
233 TODO_verify_stmts, /* todo_flags_finish */
234 0 /* letter */
237 /* Search the CFG for any computed gotos. If found, factor them to a
238 common computed goto site. Also record the location of that site so
239 that we can un-factor the gotos after we have converted back to
240 normal form. */
242 static void
243 factor_computed_gotos (void)
245 basic_block bb;
246 tree factored_label_decl = NULL;
247 tree var = NULL;
248 tree factored_computed_goto_label = NULL;
249 tree factored_computed_goto = NULL;
251 /* We know there are one or more computed gotos in this function.
252 Examine the last statement in each basic block to see if the block
253 ends with a computed goto. */
255 FOR_EACH_BB (bb)
257 block_stmt_iterator bsi = bsi_last (bb);
258 tree last;
260 if (bsi_end_p (bsi))
261 continue;
262 last = bsi_stmt (bsi);
264 /* Ignore the computed goto we create when we factor the original
265 computed gotos. */
266 if (last == factored_computed_goto)
267 continue;
269 /* If the last statement is a computed goto, factor it. */
270 if (computed_goto_p (last))
272 tree assignment;
274 /* The first time we find a computed goto we need to create
275 the factored goto block and the variable each original
 276    computed goto will use for its goto destination.  */
277 if (! factored_computed_goto)
279 basic_block new_bb = create_empty_bb (bb);
280 block_stmt_iterator new_bsi = bsi_start (new_bb);
282 /* Create the destination of the factored goto. Each original
283 computed goto will put its desired destination into this
284 variable and jump to the label we create immediately
285 below. */
286 var = create_tmp_var (ptr_type_node, "gotovar");
288 /* Build a label for the new block which will contain the
289 factored computed goto. */
290 factored_label_decl = create_artificial_label ();
291 factored_computed_goto_label
292 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
293 bsi_insert_after (&new_bsi, factored_computed_goto_label,
294 BSI_NEW_STMT);
296 /* Build our new computed goto. */
297 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
298 bsi_insert_after (&new_bsi, factored_computed_goto,
299 BSI_NEW_STMT);
302 /* Copy the original computed goto's destination into VAR. */
303 assignment = build (MODIFY_EXPR, ptr_type_node,
304 var, GOTO_DESTINATION (last));
305 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
307 /* And re-vector the computed goto to the new destination. */
308 GOTO_DESTINATION (last) = factored_label_decl;
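/* Illustrative before/after sketch, not part of the original file, of the
   transformation performed by factor_computed_gotos above (pseudo-GIMPLE;
   "gotovar" and "factored" stand for the temporary variable and the
   artificial label the function creates):

     Before:                      After:
       goto *p1;                    gotovar = p1;  goto factored;
       ...                          ...
       goto *p2;                    gotovar = p2;  goto factored;
                                    factored:  goto *gotovar;

   Every original computed goto becomes a simple goto to one shared
   dispatch block, so only that block needs edges to all the potential
   label targets.  */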
314 /* Create annotations for a single basic block. */
316 static void
317 create_block_annotation (basic_block bb)
319 /* Verify that the tree_annotations field is clear. */
320 gcc_assert (!bb->tree_annotations);
321 bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
325 /* Free the annotations for all the basic blocks. */
327 static void free_blocks_annotations (void)
329 clear_blocks_annotations ();
333 /* Clear the annotations for all the basic blocks. */
335 static void
336 clear_blocks_annotations (void)
338 basic_block bb;
340 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
341 bb->tree_annotations = NULL;
345 /* Build a flowgraph for the statement_list STMT_LIST. */
347 static void
348 make_blocks (tree stmt_list)
350 tree_stmt_iterator i = tsi_start (stmt_list);
351 tree stmt = NULL;
352 bool start_new_block = true;
353 bool first_stmt_of_list = true;
354 basic_block bb = ENTRY_BLOCK_PTR;
356 while (!tsi_end_p (i))
358 tree prev_stmt;
360 prev_stmt = stmt;
361 stmt = tsi_stmt (i);
363 /* If the statement starts a new basic block or if we have determined
364 in a previous pass that we need to create a new block for STMT, do
365 so now. */
366 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
368 if (!first_stmt_of_list)
369 stmt_list = tsi_split_statement_list_before (&i);
370 bb = create_basic_block (stmt_list, NULL, bb);
371 start_new_block = false;
374 /* Now add STMT to BB and create the subgraphs for special statement
375 codes. */
376 set_bb_for_stmt (stmt, bb);
378 if (computed_goto_p (stmt))
379 found_computed_goto = true;
381 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
382 next iteration. */
383 if (stmt_ends_bb_p (stmt))
384 start_new_block = true;
386 tsi_next (&i);
387 first_stmt_of_list = false;
392 /* Create and return a new empty basic block after bb AFTER. */
394 static basic_block
395 create_bb (void *h, void *e, basic_block after)
397 basic_block bb;
399 gcc_assert (!e);
401 /* Create and initialize a new basic block. Since alloc_block uses
402 ggc_alloc_cleared to allocate a basic block, we do not have to
403 clear the newly allocated basic block here. */
404 bb = alloc_block ();
406 bb->index = last_basic_block;
407 bb->flags = BB_NEW;
408 bb->stmt_list = h ? h : alloc_stmt_list ();
410 /* Add the new block to the linked list of blocks. */
411 link_block (bb, after);
413 /* Grow the basic block array if needed. */
414 if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
416 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
417 VARRAY_GROW (basic_block_info, new_size);
420 /* Add the newly created block to the array. */
421 BASIC_BLOCK (last_basic_block) = bb;
423 create_block_annotation (bb);
425 n_basic_blocks++;
426 last_basic_block++;
428 initialize_bb_rbi (bb);
429 return bb;
433 /*---------------------------------------------------------------------------
434 Edge creation
435 ---------------------------------------------------------------------------*/
437 /* Fold COND_EXPR_COND of each COND_EXPR. */
439 static void
440 fold_cond_expr_cond (void)
442 basic_block bb;
444 FOR_EACH_BB (bb)
446 tree stmt = last_stmt (bb);
448 if (stmt
449 && TREE_CODE (stmt) == COND_EXPR)
451 tree cond = fold (COND_EXPR_COND (stmt));
452 if (integer_zerop (cond))
453 COND_EXPR_COND (stmt) = boolean_false_node;
454 else if (integer_onep (cond))
455 COND_EXPR_COND (stmt) = boolean_true_node;
460 /* Join all the blocks in the flowgraph. */
462 static void
463 make_edges (void)
465 basic_block bb;
467 /* Create an edge from entry to the first block with executable
468 statements in it. */
469 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
471 /* Traverse the basic block array placing edges. */
472 FOR_EACH_BB (bb)
474 tree first = first_stmt (bb);
475 tree last = last_stmt (bb);
477 if (first)
479 /* Edges for statements that always alter flow control. */
480 if (is_ctrl_stmt (last))
481 make_ctrl_stmt_edges (bb);
483 /* Edges for statements that sometimes alter flow control. */
484 if (is_ctrl_altering_stmt (last))
485 make_exit_edges (bb);
488 /* Finally, if no edges were created above, this is a regular
489 basic block that only needs a fallthru edge. */
490 if (EDGE_COUNT (bb->succs) == 0)
491 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
494 /* We do not care about fake edges, so remove any that the CFG
495 builder inserted for completeness. */
496 remove_fake_exit_edges ();
498 /* Fold COND_EXPR_COND of each COND_EXPR. */
499 fold_cond_expr_cond ();
501 /* Clean up the graph and warn for unreachable code. */
502 cleanup_tree_cfg ();
506 /* Create edges for control statement at basic block BB. */
508 static void
509 make_ctrl_stmt_edges (basic_block bb)
511 tree last = last_stmt (bb);
513 gcc_assert (last);
514 switch (TREE_CODE (last))
516 case GOTO_EXPR:
517 make_goto_expr_edges (bb);
518 break;
520 case RETURN_EXPR:
521 make_edge (bb, EXIT_BLOCK_PTR, 0);
522 break;
524 case COND_EXPR:
525 make_cond_expr_edges (bb);
526 break;
528 case SWITCH_EXPR:
529 make_switch_expr_edges (bb);
530 break;
532 case RESX_EXPR:
533 make_eh_edges (last);
534 /* Yet another NORETURN hack. */
535 if (EDGE_COUNT (bb->succs) == 0)
536 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
537 break;
539 default:
540 gcc_unreachable ();
545 /* Create exit edges for statements in block BB that alter the flow of
546 control. Statements that alter the control flow are 'goto', 'return'
547 and calls to non-returning functions. */
549 static void
550 make_exit_edges (basic_block bb)
552 tree last = last_stmt (bb), op;
554 gcc_assert (last);
555 switch (TREE_CODE (last))
557 case CALL_EXPR:
558 /* If this function receives a nonlocal goto, then we need to
559 make edges from this call site to all the nonlocal goto
560 handlers. */
561 if (TREE_SIDE_EFFECTS (last)
562 && current_function_has_nonlocal_label)
563 make_goto_expr_edges (bb);
565 /* If this statement has reachable exception handlers, then
566 create abnormal edges to them. */
567 make_eh_edges (last);
569 /* Some calls are known not to return. For such calls we create
570 a fake edge.
572 We really need to revamp how we build edges so that it's not
573 such a bloody pain to avoid creating edges for this case since
574 all we do is remove these edges when we're done building the
575 CFG. */
576 if (call_expr_flags (last) & ECF_NORETURN)
578 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
579 return;
582 /* Don't forget the fall-thru edge. */
583 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
584 break;
586 case MODIFY_EXPR:
587 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
588 may have an abnormal edge. Search the RHS for this case and
589 create any required edges. */
590 op = get_call_expr_in (last);
591 if (op && TREE_SIDE_EFFECTS (op)
592 && current_function_has_nonlocal_label)
593 make_goto_expr_edges (bb);
595 make_eh_edges (last);
596 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
597 break;
599 default:
600 gcc_unreachable ();
605 /* Create the edges for a COND_EXPR starting at block BB.
606 At this point, both clauses must contain only simple gotos. */
608 static void
609 make_cond_expr_edges (basic_block bb)
611 tree entry = last_stmt (bb);
612 basic_block then_bb, else_bb;
613 tree then_label, else_label;
615 gcc_assert (entry);
616 gcc_assert (TREE_CODE (entry) == COND_EXPR);
618 /* Entry basic blocks for each component. */
619 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
620 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
621 then_bb = label_to_block (then_label);
622 else_bb = label_to_block (else_label);
624 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
625 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
628 /* Hashing routine for EDGE_TO_CASES. */
630 static hashval_t
631 edge_to_cases_hash (const void *p)
633 edge e = ((struct edge_to_cases_elt *)p)->e;
635 /* Hash on the edge itself (which is a pointer). */
636 return htab_hash_pointer (e);
639 /* Equality routine for EDGE_TO_CASES, edges are unique, so testing
640 for equality is just a pointer comparison. */
642 static int
643 edge_to_cases_eq (const void *p1, const void *p2)
645 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
646 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
648 return e1 == e2;
651 /* Called for each element in the hash table (P) as we delete the
652 edge to cases hash table.
654 Clear all the TREE_CHAINs to prevent problems with copying of
655 SWITCH_EXPRs and structure sharing rules, then free the hash table
656 element. */
658 static void
659 edge_to_cases_cleanup (void *p)
661 struct edge_to_cases_elt *elt = p;
662 tree t, next;
664 for (t = elt->case_labels; t; t = next)
666 next = TREE_CHAIN (t);
667 TREE_CHAIN (t) = NULL;
669 free (p);
672 /* Start recording information mapping edges to case labels. */
674 static void
675 start_recording_case_labels (void)
677 gcc_assert (edge_to_cases == NULL);
679 edge_to_cases = htab_create (37,
680 edge_to_cases_hash,
681 edge_to_cases_eq,
682 edge_to_cases_cleanup);
685 /* Return nonzero if we are recording information for case labels. */
687 static bool
688 recording_case_labels_p (void)
690 return (edge_to_cases != NULL);
693 /* Stop recording information mapping edges to case labels and
694 remove any information we have recorded. */
695 static void
696 end_recording_case_labels (void)
698 htab_delete (edge_to_cases);
699 edge_to_cases = NULL;
702 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
704 static void
705 record_switch_edge (edge e, tree case_label)
707 struct edge_to_cases_elt *elt;
708 void **slot;
710 /* Build a hash table element so we can see if E is already
711 in the table. */
712 elt = xmalloc (sizeof (struct edge_to_cases_elt));
713 elt->e = e;
714 elt->case_labels = case_label;
716 slot = htab_find_slot (edge_to_cases, elt, INSERT);
718 if (*slot == NULL)
720 /* E was not in the hash table. Install E into the hash table. */
721 *slot = (void *)elt;
723 else
725 /* E was already in the hash table. Free ELT as we do not need it
726 anymore. */
727 free (elt);
729 /* Get the entry stored in the hash table. */
730 elt = (struct edge_to_cases_elt *) *slot;
732 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
733 TREE_CHAIN (case_label) = elt->case_labels;
734 elt->case_labels = case_label;
738 /* If we are inside a {start,end}_recording_cases block, then return
739 a chain of CASE_LABEL_EXPRs from T which reference E.
741 Otherwise return NULL. */
743 static tree
744 get_cases_for_edge (edge e, tree t)
746 struct edge_to_cases_elt elt, *elt_p;
747 void **slot;
748 size_t i, n;
749 tree vec;
751 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
752 chains available. Return NULL so the caller can detect this case. */
753 if (!recording_case_labels_p ())
754 return NULL;
756 restart:
757 elt.e = e;
758 elt.case_labels = NULL;
759 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
761 if (slot)
763 elt_p = (struct edge_to_cases_elt *)*slot;
764 return elt_p->case_labels;
767 /* If we did not find E in the hash table, then this must be the first
768 time we have been queried for information about E & T. Add all the
769 elements from T to the hash table then perform the query again. */
771 vec = SWITCH_LABELS (t);
772 n = TREE_VEC_LENGTH (vec);
773 for (i = 0; i < n; i++)
775 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
776 basic_block label_bb = label_to_block (lab);
777 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
779 goto restart;
782 /* Create the edges for a SWITCH_EXPR starting at block BB.
783 At this point, the switch body has been lowered and the
784 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
786 static void
787 make_switch_expr_edges (basic_block bb)
789 tree entry = last_stmt (bb);
790 size_t i, n;
791 tree vec;
793 vec = SWITCH_LABELS (entry);
794 n = TREE_VEC_LENGTH (vec);
796 for (i = 0; i < n; ++i)
798 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
799 basic_block label_bb = label_to_block (lab);
800 make_edge (bb, label_bb, 0);
805 /* Return the basic block holding label DEST. */
807 basic_block
808 label_to_block_fn (struct function *ifun, tree dest)
810 int uid = LABEL_DECL_UID (dest);
 812   /* We would die hard when faced with an undefined label. Emit a label to
 813      the very first basic block. This will hopefully make even the dataflow
 814      and undefined variable warnings come out about right. */
815 if ((errorcount || sorrycount) && uid < 0)
817 block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
818 tree stmt;
820 stmt = build1 (LABEL_EXPR, void_type_node, dest);
821 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
822 uid = LABEL_DECL_UID (dest);
824 return VARRAY_BB (ifun->cfg->x_label_to_block_map, uid);
827 /* Create edges for a goto statement at block BB. */
829 static void
830 make_goto_expr_edges (basic_block bb)
832 tree goto_t;
833 basic_block target_bb;
834 int for_call;
835 block_stmt_iterator last = bsi_last (bb);
837 goto_t = bsi_stmt (last);
839 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
840 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
841 from a nonlocal goto. */
842 if (TREE_CODE (goto_t) != GOTO_EXPR)
843 for_call = 1;
844 else
846 tree dest = GOTO_DESTINATION (goto_t);
847 for_call = 0;
849 /* A GOTO to a local label creates normal edges. */
850 if (simple_goto_p (goto_t))
852 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
853 #ifdef USE_MAPPED_LOCATION
854 e->goto_locus = EXPR_LOCATION (goto_t);
855 #else
856 e->goto_locus = EXPR_LOCUS (goto_t);
857 #endif
858 bsi_remove (&last);
859 return;
862 /* Nothing more to do for nonlocal gotos. */
863 if (TREE_CODE (dest) == LABEL_DECL)
864 return;
866 /* Computed gotos remain. */
869 /* Look for the block starting with the destination label. In the
870 case of a computed goto, make an edge to any label block we find
871 in the CFG. */
872 FOR_EACH_BB (target_bb)
874 block_stmt_iterator bsi;
876 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
878 tree target = bsi_stmt (bsi);
880 if (TREE_CODE (target) != LABEL_EXPR)
881 break;
883 if (
884 /* Computed GOTOs. Make an edge to every label block that has
885 been marked as a potential target for a computed goto. */
886 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
887 /* Nonlocal GOTO target. Make an edge to every label block
888 that has been marked as a potential target for a nonlocal
889 goto. */
890 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
892 make_edge (bb, target_bb, EDGE_ABNORMAL);
893 break;
898 /* Degenerate case of computed goto with no labels. */
899 if (!for_call && EDGE_COUNT (bb->succs) == 0)
900 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
904 /*---------------------------------------------------------------------------
905 Flowgraph analysis
906 ---------------------------------------------------------------------------*/
908 /* Remove unreachable blocks and other miscellaneous clean up work. */
910 bool
911 cleanup_tree_cfg (void)
913 bool retval = false;
915 timevar_push (TV_TREE_CLEANUP_CFG);
917 retval = cleanup_control_flow ();
918 retval |= delete_unreachable_blocks ();
920 /* cleanup_forwarder_blocks can redirect edges out of SWITCH_EXPRs,
921 which can get expensive. So we want to enable recording of edge
922 to CASE_LABEL_EXPR mappings around the call to
923 cleanup_forwarder_blocks. */
924 start_recording_case_labels ();
925 retval |= cleanup_forwarder_blocks ();
926 end_recording_case_labels ();
928 #ifdef ENABLE_CHECKING
929 if (retval)
931 gcc_assert (!cleanup_control_flow ());
932 gcc_assert (!delete_unreachable_blocks ());
933 gcc_assert (!cleanup_forwarder_blocks ());
935 #endif
937 /* Merging the blocks creates no new opportunities for the other
938 optimizations, so do it here. */
939 retval |= merge_seq_blocks ();
941 compact_blocks ();
943 #ifdef ENABLE_CHECKING
944 verify_flow_info ();
945 #endif
946 timevar_pop (TV_TREE_CLEANUP_CFG);
947 return retval;
951 /* Cleanup cfg and repair loop structures. */
953 void
954 cleanup_tree_cfg_loop (void)
956 bitmap changed_bbs = BITMAP_ALLOC (NULL);
958 cleanup_tree_cfg ();
960 fix_loop_structure (current_loops, changed_bbs);
961 calculate_dominance_info (CDI_DOMINATORS);
963 /* This usually does nothing. But sometimes parts of cfg that originally
964 were inside a loop get out of it due to edge removal (since they
965 become unreachable by back edges from latch). */
966 rewrite_into_loop_closed_ssa (changed_bbs);
968 BITMAP_FREE (changed_bbs);
970 #ifdef ENABLE_CHECKING
971 verify_loop_structure (current_loops);
972 #endif
975 /* Cleanup useless labels in basic blocks. This is something we wish
976 to do early because it allows us to group case labels before creating
977 the edges for the CFG, and it speeds up block statement iterators in
978 all passes later on.
979 We only run this pass once, running it more than once is probably not
980 profitable. */
982 /* A map from basic block index to the leading label of that block. */
983 static tree *label_for_bb;
985 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
986 static void
987 update_eh_label (struct eh_region *region)
989 tree old_label = get_eh_region_tree_label (region);
990 if (old_label)
992 tree new_label;
993 basic_block bb = label_to_block (old_label);
995 /* ??? After optimizing, there may be EH regions with labels
996 that have already been removed from the function body, so
997 there is no basic block for them. */
998 if (! bb)
999 return;
1001 new_label = label_for_bb[bb->index];
1002 set_eh_region_tree_label (region, new_label);
1006 /* Given LABEL return the first label in the same basic block. */
1007 static tree
1008 main_block_label (tree label)
1010 basic_block bb = label_to_block (label);
1012 /* label_to_block possibly inserted undefined label into the chain. */
1013 if (!label_for_bb[bb->index])
1014 label_for_bb[bb->index] = label;
1015 return label_for_bb[bb->index];
1018 /* Cleanup redundant labels. This is a three-step process:
1019 1) Find the leading label for each block.
1020 2) Redirect all references to labels to the leading labels.
1021 3) Cleanup all useless labels. */
1023 void
1024 cleanup_dead_labels (void)
1026 basic_block bb;
1027 label_for_bb = xcalloc (last_basic_block, sizeof (tree));
1029 /* Find a suitable label for each block. We use the first user-defined
1030 label if there is one, or otherwise just the first label we see. */
1031 FOR_EACH_BB (bb)
1033 block_stmt_iterator i;
1035 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
1037 tree label, stmt = bsi_stmt (i);
1039 if (TREE_CODE (stmt) != LABEL_EXPR)
1040 break;
1042 label = LABEL_EXPR_LABEL (stmt);
1044 /* If we have not yet seen a label for the current block,
1045 remember this one and see if there are more labels. */
1046 if (! label_for_bb[bb->index])
1048 label_for_bb[bb->index] = label;
1049 continue;
1052 /* If we did see a label for the current block already, but it
1053 is an artificially created label, replace it if the current
1054 label is a user defined label. */
1055 if (! DECL_ARTIFICIAL (label)
1056 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
1058 label_for_bb[bb->index] = label;
1059 break;
1064 /* Now redirect all jumps/branches to the selected label.
1065 First do so for each block ending in a control statement. */
1066 FOR_EACH_BB (bb)
1068 tree stmt = last_stmt (bb);
1069 if (!stmt)
1070 continue;
1072 switch (TREE_CODE (stmt))
1074 case COND_EXPR:
1076 tree true_branch, false_branch;
1078 true_branch = COND_EXPR_THEN (stmt);
1079 false_branch = COND_EXPR_ELSE (stmt);
1081 GOTO_DESTINATION (true_branch)
1082 = main_block_label (GOTO_DESTINATION (true_branch));
1083 GOTO_DESTINATION (false_branch)
1084 = main_block_label (GOTO_DESTINATION (false_branch));
1086 break;
1089 case SWITCH_EXPR:
1091 size_t i;
1092 tree vec = SWITCH_LABELS (stmt);
1093 size_t n = TREE_VEC_LENGTH (vec);
1095 /* Replace all destination labels. */
1096 for (i = 0; i < n; ++i)
1098 tree elt = TREE_VEC_ELT (vec, i);
1099 tree label = main_block_label (CASE_LABEL (elt));
1100 CASE_LABEL (elt) = label;
1102 break;
1105 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1106 remove them until after we've created the CFG edges. */
1107 case GOTO_EXPR:
1108 if (! computed_goto_p (stmt))
1110 GOTO_DESTINATION (stmt)
1111 = main_block_label (GOTO_DESTINATION (stmt));
1112 break;
1115 default:
1116 break;
1120 for_each_eh_region (update_eh_label);
1122 /* Finally, purge dead labels. All user-defined labels and labels that
1123 can be the target of non-local gotos are preserved. */
1124 FOR_EACH_BB (bb)
1126 block_stmt_iterator i;
1127 tree label_for_this_bb = label_for_bb[bb->index];
1129 if (! label_for_this_bb)
1130 continue;
1132 for (i = bsi_start (bb); !bsi_end_p (i); )
1134 tree label, stmt = bsi_stmt (i);
1136 if (TREE_CODE (stmt) != LABEL_EXPR)
1137 break;
1139 label = LABEL_EXPR_LABEL (stmt);
1141 if (label == label_for_this_bb
1142 || ! DECL_ARTIFICIAL (label)
1143 || DECL_NONLOCAL (label))
1144 bsi_next (&i);
1145 else
1146 bsi_remove (&i);
1150 free (label_for_bb);
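/* Illustrative sketch, not part of the original file, of the effect of
   cleanup_dead_labels on a block that starts with several labels
   ("<Lx>" denotes an artificial label, "user" a user-defined one):

     Before:                      After:
       <L1>:;                       user:;
       user:;                       x = 1;
       <L2>:;
       x = 1;

   "user" is picked as the leading label for the block (user-defined labels
   win over artificial ones), all gotos, COND_EXPR/SWITCH_EXPR arms and EH
   regions that referenced <L1> or <L2> are redirected to it, and the
   now-unused artificial labels are deleted.  */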
1153 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1154 and scan the sorted vector of cases. Combine the ones jumping to the
1155 same label.
1156    E.g. three separate entries 1: 2: 3: become one entry 1..3:  */
1158 void
1159 group_case_labels (void)
1161 basic_block bb;
1163 FOR_EACH_BB (bb)
1165 tree stmt = last_stmt (bb);
1166 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1168 tree labels = SWITCH_LABELS (stmt);
1169 int old_size = TREE_VEC_LENGTH (labels);
1170 int i, j, new_size = old_size;
1171 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1172 tree default_label;
1174 /* The default label is always the last case in a switch
1175 statement after gimplification. */
1176 default_label = CASE_LABEL (default_case);
1178 /* Look for possible opportunities to merge cases.
1179 Ignore the last element of the label vector because it
1180 must be the default case. */
1181 i = 0;
1182 while (i < old_size - 1)
1184 tree base_case, base_label, base_high;
1185 base_case = TREE_VEC_ELT (labels, i);
1187 gcc_assert (base_case);
1188 base_label = CASE_LABEL (base_case);
1190 /* Discard cases that have the same destination as the
1191 default case. */
1192 if (base_label == default_label)
1194 TREE_VEC_ELT (labels, i) = NULL_TREE;
1195 i++;
1196 new_size--;
1197 continue;
1200 base_high = CASE_HIGH (base_case) ?
1201 CASE_HIGH (base_case) : CASE_LOW (base_case);
1202 i++;
1203 /* Try to merge case labels. Break out when we reach the end
1204 of the label vector or when we cannot merge the next case
1205 label with the current one. */
1206 while (i < old_size - 1)
1208 tree merge_case = TREE_VEC_ELT (labels, i);
1209 tree merge_label = CASE_LABEL (merge_case);
1210 tree t = int_const_binop (PLUS_EXPR, base_high,
1211 integer_one_node, 1);
1213 /* Merge the cases if they jump to the same place,
1214 and their ranges are consecutive. */
1215 if (merge_label == base_label
1216 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1218 base_high = CASE_HIGH (merge_case) ?
1219 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1220 CASE_HIGH (base_case) = base_high;
1221 TREE_VEC_ELT (labels, i) = NULL_TREE;
1222 new_size--;
1223 i++;
1225 else
1226 break;
1230 /* Compress the case labels in the label vector, and adjust the
1231 length of the vector. */
1232 for (i = 0, j = 0; i < new_size; i++)
1234 while (! TREE_VEC_ELT (labels, j))
1235 j++;
1236 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1238 TREE_VEC_LENGTH (labels) = new_size;
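/* Illustrative, self-contained sketch, not part of the original file: the
   same merging idea as group_case_labels, applied to a plain array of case
   ranges instead of a TREE_VEC of CASE_LABEL_EXPRs.  The names case_range
   and merge_case_ranges are hypothetical; the array is assumed to be sorted
   by LOW, mirroring SWITCH_LABELS after gimplification.  */
#if 0
#include <stdio.h>

struct case_range { long low, high; int dest; };

/* Merge consecutive ranges that jump to the same destination.  Returns
   the new (possibly smaller) number of entries.  */
static int
merge_case_ranges (struct case_range *v, int n)
{
  int i = 0, out = 0;

  while (i < n)
    {
      struct case_range cur = v[i++];

      /* Extend CUR while the next range has the same destination and its
         low bound is exactly one past CUR's high bound.  */
      while (i < n && v[i].dest == cur.dest && v[i].low == cur.high + 1)
        cur.high = v[i++].high;
      v[out++] = cur;
    }
  return out;
}

int
main (void)
{
  /* 1: 2: 3: all jump to the same label, so they become 1..3:.  */
  struct case_range v[] = { {1, 1, 7}, {2, 2, 7}, {3, 3, 7}, {5, 5, 9} };
  int i, n = merge_case_ranges (v, 4);

  for (i = 0; i < n; i++)
    printf ("%ld..%ld -> L%d\n", v[i].low, v[i].high, v[i].dest);
  return 0;
}
#endif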
1243 /* Checks whether we can merge block B into block A. */
1245 static bool
1246 tree_can_merge_blocks_p (basic_block a, basic_block b)
1248 tree stmt;
1249 block_stmt_iterator bsi;
1251 if (!single_succ_p (a))
1252 return false;
1254 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1255 return false;
1257 if (single_succ (a) != b)
1258 return false;
1260 if (!single_pred_p (b))
1261 return false;
1263 if (b == EXIT_BLOCK_PTR)
1264 return false;
1266 /* If A ends by a statement causing exceptions or something similar, we
1267 cannot merge the blocks. */
1268 stmt = last_stmt (a);
1269 if (stmt && stmt_ends_bb_p (stmt))
1270 return false;
1272 /* Do not allow a block with only a non-local label to be merged. */
1273 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1274 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1275 return false;
1277   /* There must be no PHI nodes at the start of B.  */
1278 if (phi_nodes (b))
1279 return false;
1281 /* Do not remove user labels. */
1282 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1284 stmt = bsi_stmt (bsi);
1285 if (TREE_CODE (stmt) != LABEL_EXPR)
1286 break;
1287 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1288 return false;
1291 /* Protect the loop latches. */
1292 if (current_loops
1293 && b->loop_father->latch == b)
1294 return false;
1296 return true;
1300 /* Merge block B into block A. */
1302 static void
1303 tree_merge_blocks (basic_block a, basic_block b)
1305 block_stmt_iterator bsi;
1306 tree_stmt_iterator last;
1308 if (dump_file)
1309 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1311 /* Ensure that B follows A. */
1312 move_block_after (b, a);
1314 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1315 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1317 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1318 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1320 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1322 tree label = bsi_stmt (bsi);
1324 bsi_remove (&bsi);
1325          /* Now that we can thread computed gotos, we might have
1326             a situation where we have a forced label in block B.
1327             However, the label at the start of block B might still be
1328             used in other ways (think about the runtime checking for
1329             Fortran assigned gotos).  So we cannot just delete the
1330             label.  Instead we move the label to the start of block A.  */
1331 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1333 block_stmt_iterator dest_bsi = bsi_start (a);
1334 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1337 else
1339 set_bb_for_stmt (bsi_stmt (bsi), a);
1340 bsi_next (&bsi);
1344 /* Merge the chains. */
1345 last = tsi_last (a->stmt_list);
1346 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1347 b->stmt_list = NULL;
1351 /* Walk the function tree removing unnecessary statements.
1353 * Empty statement nodes are removed
1355 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1357 * Unnecessary COND_EXPRs are removed
1359 * Some unnecessary BIND_EXPRs are removed
1361 Clearly more work could be done. The trick is doing the analysis
1362 and removal fast enough to be a net improvement in compile times.
1364    Note that when we remove a control structure such as a COND_EXPR,
1365 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1366 to ensure we eliminate all the useless code. */
1368 struct rus_data
1370 tree *last_goto;
1371 bool repeat;
1372 bool may_throw;
1373 bool may_branch;
1374 bool has_label;
1377 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1379 static bool
1380 remove_useless_stmts_warn_notreached (tree stmt)
1382 if (EXPR_HAS_LOCATION (stmt))
1384 location_t loc = EXPR_LOCATION (stmt);
1385 if (LOCATION_LINE (loc) > 0)
1387 warning ("%Hwill never be executed", &loc);
1388 return true;
1392 switch (TREE_CODE (stmt))
1394 case STATEMENT_LIST:
1396 tree_stmt_iterator i;
1397 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1398 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1399 return true;
1401 break;
1403 case COND_EXPR:
1404 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1405 return true;
1406 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1407 return true;
1408 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1409 return true;
1410 break;
1412 case TRY_FINALLY_EXPR:
1413 case TRY_CATCH_EXPR:
1414 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1415 return true;
1416 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1417 return true;
1418 break;
1420 case CATCH_EXPR:
1421 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1422 case EH_FILTER_EXPR:
1423 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1424 case BIND_EXPR:
1425 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1427 default:
1428 /* Not a live container. */
1429 break;
1432 return false;
1435 static void
1436 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1438 tree then_clause, else_clause, cond;
1439 bool save_has_label, then_has_label, else_has_label;
1441 save_has_label = data->has_label;
1442 data->has_label = false;
1443 data->last_goto = NULL;
1445 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1447 then_has_label = data->has_label;
1448 data->has_label = false;
1449 data->last_goto = NULL;
1451 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1453 else_has_label = data->has_label;
1454 data->has_label = save_has_label | then_has_label | else_has_label;
1456 then_clause = COND_EXPR_THEN (*stmt_p);
1457 else_clause = COND_EXPR_ELSE (*stmt_p);
1458 cond = fold (COND_EXPR_COND (*stmt_p));
1460 /* If neither arm does anything at all, we can remove the whole IF. */
1461 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1463 *stmt_p = build_empty_stmt ();
1464 data->repeat = true;
1467 /* If there are no reachable statements in an arm, then we can
1468 zap the entire conditional. */
1469 else if (integer_nonzerop (cond) && !else_has_label)
1471 if (warn_notreached)
1472 remove_useless_stmts_warn_notreached (else_clause);
1473 *stmt_p = then_clause;
1474 data->repeat = true;
1476 else if (integer_zerop (cond) && !then_has_label)
1478 if (warn_notreached)
1479 remove_useless_stmts_warn_notreached (then_clause);
1480 *stmt_p = else_clause;
1481 data->repeat = true;
1484 /* Check a couple of simple things on then/else with single stmts. */
1485 else
1487 tree then_stmt = expr_only (then_clause);
1488 tree else_stmt = expr_only (else_clause);
1490 /* Notice branches to a common destination. */
1491 if (then_stmt && else_stmt
1492 && TREE_CODE (then_stmt) == GOTO_EXPR
1493 && TREE_CODE (else_stmt) == GOTO_EXPR
1494 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1496 *stmt_p = then_stmt;
1497 data->repeat = true;
1500 /* If the THEN/ELSE clause merely assigns a value to a variable or
1501 parameter which is already known to contain that value, then
1502 remove the useless THEN/ELSE clause. */
1503 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1505 if (else_stmt
1506 && TREE_CODE (else_stmt) == MODIFY_EXPR
1507 && TREE_OPERAND (else_stmt, 0) == cond
1508 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1509 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1511 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1512 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1513 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1514 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1516 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1517 ? then_stmt : else_stmt);
1518 tree *location = (TREE_CODE (cond) == EQ_EXPR
1519 ? &COND_EXPR_THEN (*stmt_p)
1520 : &COND_EXPR_ELSE (*stmt_p));
1522 if (stmt
1523 && TREE_CODE (stmt) == MODIFY_EXPR
1524 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1525 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1526 *location = alloc_stmt_list ();
1530 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1531 would be re-introduced during lowering. */
1532 data->last_goto = NULL;
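/* Illustrative example, not part of the original file, of the last
   transformation above.  Given

     if (x == 0)
       x = 0;
     else
       foo ();

   the THEN arm only stores the value the condition has already proved x to
   hold, so it is replaced by an empty statement list, leaving the test and
   the ELSE arm untouched.  */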
1536 static void
1537 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1539 bool save_may_branch, save_may_throw;
1540 bool this_may_branch, this_may_throw;
1542 /* Collect may_branch and may_throw information for the body only. */
1543 save_may_branch = data->may_branch;
1544 save_may_throw = data->may_throw;
1545 data->may_branch = false;
1546 data->may_throw = false;
1547 data->last_goto = NULL;
1549 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1551 this_may_branch = data->may_branch;
1552 this_may_throw = data->may_throw;
1553 data->may_branch |= save_may_branch;
1554 data->may_throw |= save_may_throw;
1555 data->last_goto = NULL;
1557 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1559 /* If the body is empty, then we can emit the FINALLY block without
1560 the enclosing TRY_FINALLY_EXPR. */
1561 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1563 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1564 data->repeat = true;
1567 /* If the handler is empty, then we can emit the TRY block without
1568 the enclosing TRY_FINALLY_EXPR. */
1569 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1571 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1572 data->repeat = true;
1575 /* If the body neither throws, nor branches, then we can safely
1576 string the TRY and FINALLY blocks together. */
1577 else if (!this_may_branch && !this_may_throw)
1579 tree stmt = *stmt_p;
1580 *stmt_p = TREE_OPERAND (stmt, 0);
1581 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1582 data->repeat = true;
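/* Illustrative example, not part of the original file, of the last case
   above.  If the body of

     try { a (); } finally { b (); }

   can neither throw nor branch out of the TRY block, the construct is
   flattened into the plain sequence "a (); b ();", since the FINALLY part
   is then guaranteed to run exactly once, right after the body.  */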
1587 static void
1588 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1590 bool save_may_throw, this_may_throw;
1591 tree_stmt_iterator i;
1592 tree stmt;
1594 /* Collect may_throw information for the body only. */
1595 save_may_throw = data->may_throw;
1596 data->may_throw = false;
1597 data->last_goto = NULL;
1599 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1601 this_may_throw = data->may_throw;
1602 data->may_throw = save_may_throw;
1604 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1605 if (!this_may_throw)
1607 if (warn_notreached)
1608 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1609 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1610 data->repeat = true;
1611 return;
1614 /* Process the catch clause specially. We may be able to tell that
1615 no exceptions propagate past this point. */
1617 this_may_throw = true;
1618 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1619 stmt = tsi_stmt (i);
1620 data->last_goto = NULL;
1622 switch (TREE_CODE (stmt))
1624 case CATCH_EXPR:
1625 for (; !tsi_end_p (i); tsi_next (&i))
1627 stmt = tsi_stmt (i);
1628 /* If we catch all exceptions, then the body does not
1629 propagate exceptions past this point. */
1630 if (CATCH_TYPES (stmt) == NULL)
1631 this_may_throw = false;
1632 data->last_goto = NULL;
1633 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1635 break;
1637 case EH_FILTER_EXPR:
1638 if (EH_FILTER_MUST_NOT_THROW (stmt))
1639 this_may_throw = false;
1640 else if (EH_FILTER_TYPES (stmt) == NULL)
1641 this_may_throw = false;
1642 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1643 break;
1645 default:
1646 /* Otherwise this is a cleanup. */
1647 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1649 /* If the cleanup is empty, then we can emit the TRY block without
1650 the enclosing TRY_CATCH_EXPR. */
1651 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1653 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1654 data->repeat = true;
1656 break;
1658 data->may_throw |= this_may_throw;
1662 static void
1663 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1665 tree block;
1667 /* First remove anything underneath the BIND_EXPR. */
1668 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1670 /* If the BIND_EXPR has no variables, then we can pull everything
1671 up one level and remove the BIND_EXPR, unless this is the toplevel
1672 BIND_EXPR for the current function or an inlined function.
1674 When this situation occurs we will want to apply this
1675 optimization again. */
1676 block = BIND_EXPR_BLOCK (*stmt_p);
1677 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1678 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1679 && (! block
1680 || ! BLOCK_ABSTRACT_ORIGIN (block)
1681 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1682 != FUNCTION_DECL)))
1684 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1685 data->repeat = true;
1690 static void
1691 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1693 tree dest = GOTO_DESTINATION (*stmt_p);
1695 data->may_branch = true;
1696 data->last_goto = NULL;
1698 /* Record the last goto expr, so that we can delete it if unnecessary. */
1699 if (TREE_CODE (dest) == LABEL_DECL)
1700 data->last_goto = stmt_p;
1704 static void
1705 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1707 tree label = LABEL_EXPR_LABEL (*stmt_p);
1709 data->has_label = true;
1711 /* We do want to jump across non-local label receiver code. */
1712 if (DECL_NONLOCAL (label))
1713 data->last_goto = NULL;
1715 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1717 *data->last_goto = build_empty_stmt ();
1718 data->repeat = true;
1721 /* ??? Add something here to delete unused labels. */
1725 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1726 decl. This allows us to eliminate redundant or useless
1727 calls to "const" functions.
1729    The gimplifier already does the same operation, but we may only notice
1730    that functions are const or pure after their calls have been gimplified,
1731    so we need to update the flags here as well.  */
1733 static void
1734 update_call_expr_flags (tree call)
1736 tree decl = get_callee_fndecl (call);
1737 if (!decl)
1738 return;
1739 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1740 TREE_SIDE_EFFECTS (call) = 0;
1741 if (TREE_NOTHROW (decl))
1742 TREE_NOTHROW (call) = 1;
1746 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1748 void
1749 notice_special_calls (tree t)
1751 int flags = call_expr_flags (t);
1753 if (flags & ECF_MAY_BE_ALLOCA)
1754 current_function_calls_alloca = true;
1755 if (flags & ECF_RETURNS_TWICE)
1756 current_function_calls_setjmp = true;
1760 /* Clear flags set by notice_special_calls. Used by dead code removal
1761 to update the flags. */
1763 void
1764 clear_special_calls (void)
1766 current_function_calls_alloca = false;
1767 current_function_calls_setjmp = false;
1771 static void
1772 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1774 tree t = *tp, op;
1776 switch (TREE_CODE (t))
1778 case COND_EXPR:
1779 remove_useless_stmts_cond (tp, data);
1780 break;
1782 case TRY_FINALLY_EXPR:
1783 remove_useless_stmts_tf (tp, data);
1784 break;
1786 case TRY_CATCH_EXPR:
1787 remove_useless_stmts_tc (tp, data);
1788 break;
1790 case BIND_EXPR:
1791 remove_useless_stmts_bind (tp, data);
1792 break;
1794 case GOTO_EXPR:
1795 remove_useless_stmts_goto (tp, data);
1796 break;
1798 case LABEL_EXPR:
1799 remove_useless_stmts_label (tp, data);
1800 break;
1802 case RETURN_EXPR:
1803 fold_stmt (tp);
1804 data->last_goto = NULL;
1805 data->may_branch = true;
1806 break;
1808 case CALL_EXPR:
1809 fold_stmt (tp);
1810 data->last_goto = NULL;
1811 notice_special_calls (t);
1812 update_call_expr_flags (t);
1813 if (tree_could_throw_p (t))
1814 data->may_throw = true;
1815 break;
1817 case MODIFY_EXPR:
1818 data->last_goto = NULL;
1819 fold_stmt (tp);
1820 op = get_call_expr_in (t);
1821 if (op)
1823 update_call_expr_flags (op);
1824 notice_special_calls (op);
1826 if (tree_could_throw_p (t))
1827 data->may_throw = true;
1828 break;
1830 case STATEMENT_LIST:
1832 tree_stmt_iterator i = tsi_start (t);
1833 while (!tsi_end_p (i))
1835 t = tsi_stmt (i);
1836 if (IS_EMPTY_STMT (t))
1838 tsi_delink (&i);
1839 continue;
1842 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1844 t = tsi_stmt (i);
1845 if (TREE_CODE (t) == STATEMENT_LIST)
1847 tsi_link_before (&i, t, TSI_SAME_STMT);
1848 tsi_delink (&i);
1850 else
1851 tsi_next (&i);
1854 break;
1855 case ASM_EXPR:
1856 fold_stmt (tp);
1857 data->last_goto = NULL;
1858 break;
1860 default:
1861 data->last_goto = NULL;
1862 break;
1866 static void
1867 remove_useless_stmts (void)
1869 struct rus_data data;
1871   clear_special_calls ();
1873   do
1875       memset (&data, 0, sizeof (data));
1876       remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1878   while (data.repeat);
1882 struct tree_opt_pass pass_remove_useless_stmts =
1884 "useless", /* name */
1885 NULL, /* gate */
1886 remove_useless_stmts, /* execute */
1887 NULL, /* sub */
1888 NULL, /* next */
1889 0, /* static_pass_number */
1890 0, /* tv_id */
1891 PROP_gimple_any, /* properties_required */
1892 0, /* properties_provided */
1893 0, /* properties_destroyed */
1894 0, /* todo_flags_start */
1895 TODO_dump_func, /* todo_flags_finish */
1896 0 /* letter */
1900 /* Remove obviously useless statements in basic block BB. */
1902 static void
1903 cfg_remove_useless_stmts_bb (basic_block bb)
1905 block_stmt_iterator bsi;
1906 tree stmt = NULL_TREE;
1907 tree cond, var = NULL_TREE, val = NULL_TREE;
1908 struct var_ann_d *ann;
1910 /* Check whether we come here from a condition, and if so, get the
1911 condition. */
1912 if (!single_pred_p (bb)
1913 || !(single_pred_edge (bb)->flags
1914 & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
1915 return;
1917 cond = COND_EXPR_COND (last_stmt (single_pred (bb)));
1919 if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1921 var = cond;
1922 val = (single_pred_edge (bb)->flags & EDGE_FALSE_VALUE
1923 ? boolean_false_node : boolean_true_node);
1925 else if (TREE_CODE (cond) == TRUTH_NOT_EXPR
1926 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1927 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL))
1929 var = TREE_OPERAND (cond, 0);
1930 val = (single_pred_edge (bb)->flags & EDGE_FALSE_VALUE
1931 ? boolean_true_node : boolean_false_node);
1933 else
1935 if (single_pred_edge (bb)->flags & EDGE_FALSE_VALUE)
1936 cond = invert_truthvalue (cond);
1937 if (TREE_CODE (cond) == EQ_EXPR
1938 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1939 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1940 && (TREE_CODE (TREE_OPERAND (cond, 1)) == VAR_DECL
1941 || TREE_CODE (TREE_OPERAND (cond, 1)) == PARM_DECL
1942 || TREE_CONSTANT (TREE_OPERAND (cond, 1))))
1944 var = TREE_OPERAND (cond, 0);
1945 val = TREE_OPERAND (cond, 1);
1947 else
1948 return;
1951 /* Only work for normal local variables. */
1952 ann = var_ann (var);
1953 if (!ann
1954 || ann->may_aliases
1955 || TREE_ADDRESSABLE (var))
1956 return;
1958 if (! TREE_CONSTANT (val))
1960 ann = var_ann (val);
1961 if (!ann
1962 || ann->may_aliases
1963 || TREE_ADDRESSABLE (val))
1964 return;
1967   /* Ignore floating point variables, since equality comparisons behave
1968      oddly for them (NaNs, signed zeros).  */
1969 if (FLOAT_TYPE_P (TREE_TYPE (var)))
1970 return;
1972 for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
1974 stmt = bsi_stmt (bsi);
1976 /* If the THEN/ELSE clause merely assigns a value to a variable/parameter
1977 which is already known to contain that value, then remove the useless
1978 THEN/ELSE clause. */
1979 if (TREE_CODE (stmt) == MODIFY_EXPR
1980 && TREE_OPERAND (stmt, 0) == var
1981 && operand_equal_p (val, TREE_OPERAND (stmt, 1), 0))
1983 bsi_remove (&bsi);
1984 continue;
1987 /* Invalidate the var if we encounter something that could modify it.
1988 Likewise for the value it was previously set to. Note that we only
1989 consider values that are either a VAR_DECL or PARM_DECL so we
1990 can test for conflict very simply. */
1991 if (TREE_CODE (stmt) == ASM_EXPR
1992 || (TREE_CODE (stmt) == MODIFY_EXPR
1993 && (TREE_OPERAND (stmt, 0) == var
1994 || TREE_OPERAND (stmt, 0) == val)))
1995 return;
1997 bsi_next (&bsi);
2002 /* A CFG-aware version of remove_useless_stmts. */
2004 void
2005 cfg_remove_useless_stmts (void)
2007 basic_block bb;
2009 #ifdef ENABLE_CHECKING
2010 verify_flow_info ();
2011 #endif
2013 FOR_EACH_BB (bb)
2015 cfg_remove_useless_stmts_bb (bb);
2020 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2022 static void
2023 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2025 tree phi;
2027 /* Since this block is no longer reachable, we can just delete all
2028 of its PHI nodes. */
2029 phi = phi_nodes (bb);
2030 while (phi)
2032 tree next = PHI_CHAIN (phi);
2033 remove_phi_node (phi, NULL_TREE);
2034 phi = next;
2037 /* Remove edges to BB's successors. */
2038 while (EDGE_COUNT (bb->succs) > 0)
2039 remove_edge (EDGE_SUCC (bb, 0));
2043 /* Remove statements of basic block BB. */
2045 static void
2046 remove_bb (basic_block bb)
2048 block_stmt_iterator i;
2049 #ifdef USE_MAPPED_LOCATION
2050 source_location loc = UNKNOWN_LOCATION;
2051 #else
2052 source_locus loc = 0;
2053 #endif
2055 if (dump_file)
2057 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2058 if (dump_flags & TDF_DETAILS)
2060 dump_bb (bb, dump_file, 0);
2061 fprintf (dump_file, "\n");
2065 /* If we remove the header or the latch of a loop, mark the loop for
2066 removal by setting its header and latch to NULL. */
2067 if (current_loops)
2069 struct loop *loop = bb->loop_father;
2071 if (loop->latch == bb
2072 || loop->header == bb)
2074 loop->latch = NULL;
2075 loop->header = NULL;
2079 /* Remove all the instructions in the block. */
2080 for (i = bsi_start (bb); !bsi_end_p (i);)
2082 tree stmt = bsi_stmt (i);
2083 if (TREE_CODE (stmt) == LABEL_EXPR
2084 && FORCED_LABEL (LABEL_EXPR_LABEL (stmt)))
2086 basic_block new_bb = bb->prev_bb;
2087 block_stmt_iterator new_bsi = bsi_start (new_bb);
2089 bsi_remove (&i);
2090 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2092 else
2094 release_defs (stmt);
2096 set_bb_for_stmt (stmt, NULL);
2097 bsi_remove (&i);
2100 /* Don't warn for removed gotos. Gotos are often removed due to
2101 jump threading, thus resulting in bogus warnings. Not great,
2102 since this way we lose warnings for gotos in the original
2103 program that are indeed unreachable. */
2104 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2106 #ifdef USE_MAPPED_LOCATION
2107 if (EXPR_HAS_LOCATION (stmt))
2108 loc = EXPR_LOCATION (stmt);
2109 #else
2110 source_locus t;
2111 t = EXPR_LOCUS (stmt);
2112 if (t && LOCATION_LINE (*t) > 0)
2113 loc = t;
2114 #endif
2118 /* If requested, give a warning that the first statement in the
2119 block is unreachable. The loop above walks forward, but LOC is
2120 set at most once, so it ends up holding the location of the first
2121 statement in the block that carries location information. */
2122 #ifdef USE_MAPPED_LOCATION
2123 if (warn_notreached && loc > BUILTINS_LOCATION)
2124 warning ("%Hwill never be executed", &loc);
2125 #else
2126 if (warn_notreached && loc)
2127 warning ("%Hwill never be executed", loc);
2128 #endif
2130 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2133 /* A list of all the noreturn calls passed to modify_stmt.
2134 cleanup_control_flow uses it to detect cases where a mid-block
2135 indirect call has been turned into a noreturn call. When this
2136 happens, all the instructions after the call are no longer
2137 reachable and must be deleted as dead. */
2139 VEC(tree) *modified_noreturn_calls;
2141 /* Try to remove superfluous control structures. */
2143 static bool
2144 cleanup_control_flow (void)
2146 basic_block bb;
2147 block_stmt_iterator bsi;
2148 bool retval = false;
2149 tree stmt;
2151 /* Detect cases where a mid-block call is now known not to return. */
2152 while (VEC_length (tree, modified_noreturn_calls))
2154 stmt = VEC_pop (tree, modified_noreturn_calls);
2155 bb = bb_for_stmt (stmt);
2156 if (bb != NULL && last_stmt (bb) != stmt && noreturn_call_p (stmt))
2157 split_block (bb, stmt);
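/* split_block moves the statements after the noreturn call into a new
   block; the loop below then drops the fallthru edge after the call,
   leaving that block unreachable so later CFG cleanup can delete it. */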
2160 FOR_EACH_BB (bb)
2162 bsi = bsi_last (bb);
2164 if (bsi_end_p (bsi))
2165 continue;
2167 stmt = bsi_stmt (bsi);
2168 if (TREE_CODE (stmt) == COND_EXPR
2169 || TREE_CODE (stmt) == SWITCH_EXPR)
2170 retval |= cleanup_control_expr_graph (bb, bsi);
2172 /* If we had a computed goto which has a compile-time determinable
2173 destination, then we can eliminate the goto. */
2174 if (TREE_CODE (stmt) == GOTO_EXPR
2175 && TREE_CODE (GOTO_DESTINATION (stmt)) == ADDR_EXPR
2176 && TREE_CODE (TREE_OPERAND (GOTO_DESTINATION (stmt), 0)) == LABEL_DECL)
2178 edge e;
2179 tree label;
2180 edge_iterator ei;
2181 basic_block target_block;
2182 bool removed_edge = false;
2184 /* First look at all the outgoing edges. Delete any outgoing
2185 edges which do not go to the right block. For the one
2186 edge which goes to the right block, fix up its flags. */
2187 label = TREE_OPERAND (GOTO_DESTINATION (stmt), 0);
2188 target_block = label_to_block (label);
2189 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2191 if (e->dest != target_block)
2193 removed_edge = true;
2194 remove_edge (e);
2196 else
2198 /* Turn off the EDGE_ABNORMAL flag. */
2199 e->flags &= ~EDGE_ABNORMAL;
2201 /* And set EDGE_FALLTHRU. */
2202 e->flags |= EDGE_FALLTHRU;
2203 ei_next (&ei);
2207 /* If we removed one or more edges, then we will need to fix the
2208 dominators. It may be possible to incrementally update them. */
2209 if (removed_edge)
2210 free_dominance_info (CDI_DOMINATORS);
2212 /* Remove the GOTO_EXPR as it is not needed. The CFG has all the
2213 relevant information we need. */
2214 bsi_remove (&bsi);
2215 retval = true;
2218 /* Check for indirect calls that have been turned into
2219 noreturn calls. */
2220 if (noreturn_call_p (stmt) && remove_fallthru_edge (bb->succs))
2222 free_dominance_info (CDI_DOMINATORS);
2223 retval = true;
2226 return retval;
2230 /* Disconnect an unreachable block in the control expression starting
2231 at block BB. */
2233 static bool
2234 cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
2236 edge taken_edge;
2237 bool retval = false;
2238 tree expr = bsi_stmt (bsi), val;
2240 if (!single_succ_p (bb))
2242 edge e;
2243 edge_iterator ei;
2245 switch (TREE_CODE (expr))
2247 case COND_EXPR:
2248 val = COND_EXPR_COND (expr);
2249 break;
2251 case SWITCH_EXPR:
2252 val = SWITCH_COND (expr);
2253 if (TREE_CODE (val) != INTEGER_CST)
2254 return false;
2255 break;
2257 default:
2258 gcc_unreachable ();
2261 taken_edge = find_taken_edge (bb, val);
2262 if (!taken_edge)
2263 return false;
2265 /* Remove all the edges except the one that is always executed. */
2266 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2268 if (e != taken_edge)
2270 taken_edge->probability += e->probability;
2271 taken_edge->count += e->count;
2272 remove_edge (e);
2273 retval = true;
2275 else
2276 ei_next (&ei);
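/* The probabilities of the removed edges were added to TAKEN_EDGE
   above; clamp in case rounding pushed the sum past certainty. */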
2278 if (taken_edge->probability > REG_BR_PROB_BASE)
2279 taken_edge->probability = REG_BR_PROB_BASE;
2281 else
2282 taken_edge = single_succ_edge (bb);
2284 bsi_remove (&bsi);
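/* The COND_EXPR or SWITCH_EXPR is gone; the single remaining edge is
   now an unconditional fallthrough. */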
2285 taken_edge->flags = EDGE_FALLTHRU;
2287 /* We removed some paths from the cfg. */
2288 free_dominance_info (CDI_DOMINATORS);
2290 return retval;
2293 /* Remove any fallthru edge from EV. Return true if an edge was removed. */
2295 static bool
2296 remove_fallthru_edge (VEC(edge) *ev)
2298 edge_iterator ei;
2299 edge e;
2301 FOR_EACH_EDGE (e, ei, ev)
2302 if ((e->flags & EDGE_FALLTHRU) != 0)
2304 remove_edge (e);
2305 return true;
2307 return false;
2310 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2311 predicate VAL, return the edge that will be taken out of the block.
2312 If VAL does not match a unique edge, NULL is returned. */
2314 edge
2315 find_taken_edge (basic_block bb, tree val)
2317 tree stmt;
2319 stmt = last_stmt (bb);
2321 gcc_assert (stmt);
2322 gcc_assert (is_ctrl_stmt (stmt));
2323 gcc_assert (val);
2325 if (! is_gimple_min_invariant (val))
2326 return NULL;
2328 if (TREE_CODE (stmt) == COND_EXPR)
2329 return find_taken_edge_cond_expr (bb, val);
2331 if (TREE_CODE (stmt) == SWITCH_EXPR)
2332 return find_taken_edge_switch_expr (bb, val);
2334 if (computed_goto_p (stmt))
2335 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2337 gcc_unreachable ();
2340 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2341 statement, determine which of the outgoing edges will be taken out of the
2342 block. Return NULL if the destination block cannot be determined. */
2344 static edge
2345 find_taken_edge_computed_goto (basic_block bb, tree val)
2347 basic_block dest;
2348 edge e = NULL;
2350 dest = label_to_block (val);
2351 if (dest)
2353 e = find_edge (bb, dest);
2354 gcc_assert (e != NULL);
2357 return e;
2360 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2361 statement, determine which of the two edges will be taken out of the
2362 block. Return NULL if either edge may be taken. */
2364 static edge
2365 find_taken_edge_cond_expr (basic_block bb, tree val)
2367 edge true_edge, false_edge;
2369 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2371 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2372 return (zero_p (val) ? false_edge : true_edge);
2375 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2376 statement, determine which edge will be taken out of the block. Return
2377 NULL if any edge may be taken. */
2379 static edge
2380 find_taken_edge_switch_expr (basic_block bb, tree val)
2382 tree switch_expr, taken_case;
2383 basic_block dest_bb;
2384 edge e;
2386 switch_expr = last_stmt (bb);
2387 taken_case = find_case_label_for_value (switch_expr, val);
2388 dest_bb = label_to_block (CASE_LABEL (taken_case));
2390 e = find_edge (bb, dest_bb);
2391 gcc_assert (e);
2392 return e;
2396 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2397 We can make optimal use here of the fact that the case labels are
2398 sorted: We can do a binary search for a case matching VAL. */
2400 static tree
2401 find_case_label_for_value (tree switch_expr, tree val)
2403 tree vec = SWITCH_LABELS (switch_expr);
2404 size_t low, high, n = TREE_VEC_LENGTH (vec);
2405 tree default_case = TREE_VEC_ELT (vec, n - 1);
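/* Binary search over the sorted case labels. LOW is conceptually -1
   (stored as (size_t) -1; the HIGH - LOW > 1 test relies on unsigned
   wraparound). Labels at index <= LOW are known to have CASE_LOW <=
   VAL, while the label at HIGH is either the default or has
   CASE_LOW > VAL. */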
2407 for (low = -1, high = n - 1; high - low > 1; )
2409 size_t i = (high + low) / 2;
2410 tree t = TREE_VEC_ELT (vec, i);
2411 int cmp;
2413 /* Cache the result of comparing CASE_LOW and val. */
2414 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2416 if (cmp > 0)
2417 high = i;
2418 else
2419 low = i;
2421 if (CASE_HIGH (t) == NULL)
2423 /* A single-valued case label. */
2424 if (cmp == 0)
2425 return t;
2427 else
2429 /* A case range. We can only handle integer ranges. */
2430 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2431 return t;
2435 return default_case;
2439 /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
2440 those alternatives are equal in each of the PHI nodes, then return
2441 true, else return false. */
2443 static bool
2444 phi_alternatives_equal (basic_block dest, edge e1, edge e2)
2446 int n1 = e1->dest_idx;
2447 int n2 = e2->dest_idx;
2448 tree phi;
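/* An edge's dest_idx is its position in the destination's predecessor
   vector, which is also the index of the PHI argument supplied along
   that edge. */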
2450 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
2452 tree val1 = PHI_ARG_DEF (phi, n1);
2453 tree val2 = PHI_ARG_DEF (phi, n2);
2455 gcc_assert (val1 != NULL_TREE);
2456 gcc_assert (val2 != NULL_TREE);
2458 if (!operand_equal_for_phi_arg_p (val1, val2))
2459 return false;
2462 return true;
2466 /*---------------------------------------------------------------------------
2467 Debugging functions
2468 ---------------------------------------------------------------------------*/
2470 /* Dump tree-specific information of block BB to file OUTF. */
2472 void
2473 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2475 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2479 /* Dump a basic block on stderr. */
2481 void
2482 debug_tree_bb (basic_block bb)
2484 dump_bb (bb, stderr, 0);
2488 /* Dump basic block with index N on stderr. */
2490 basic_block
2491 debug_tree_bb_n (int n)
2493 debug_tree_bb (BASIC_BLOCK (n));
2494 return BASIC_BLOCK (n);
2498 /* Dump the CFG on stderr.
2500 FLAGS are the same used by the tree dumping functions
2501 (see TDF_* in tree.h). */
2503 void
2504 debug_tree_cfg (int flags)
2506 dump_tree_cfg (stderr, flags);
2510 /* Dump the program showing basic block boundaries on the given FILE.
2512 FLAGS are the same used by the tree dumping functions (see TDF_* in
2513 tree.h). */
2515 void
2516 dump_tree_cfg (FILE *file, int flags)
2518 if (flags & TDF_DETAILS)
2520 const char *funcname
2521 = lang_hooks.decl_printable_name (current_function_decl, 2);
2523 fputc ('\n', file);
2524 fprintf (file, ";; Function %s\n\n", funcname);
2525 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2526 n_basic_blocks, n_edges, last_basic_block);
2528 brief_dump_cfg (file);
2529 fprintf (file, "\n");
2532 if (flags & TDF_STATS)
2533 dump_cfg_stats (file);
2535 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2539 /* Dump CFG statistics on FILE. */
2541 void
2542 dump_cfg_stats (FILE *file)
2544 static long max_num_merged_labels = 0;
2545 unsigned long size, total = 0;
2546 long num_edges;
2547 basic_block bb;
2548 const char * const fmt_str = "%-30s%-13s%12s\n";
2549 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2550 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2551 const char *funcname
2552 = lang_hooks.decl_printable_name (current_function_decl, 2);
2555 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2557 fprintf (file, "---------------------------------------------------------\n");
2558 fprintf (file, fmt_str, "", " Number of ", "Memory");
2559 fprintf (file, fmt_str, "", " instances ", "used ");
2560 fprintf (file, "---------------------------------------------------------\n");
2562 size = n_basic_blocks * sizeof (struct basic_block_def);
2563 total += size;
2564 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2565 SCALE (size), LABEL (size));
2567 num_edges = 0;
2568 FOR_EACH_BB (bb)
2569 num_edges += EDGE_COUNT (bb->succs);
2570 size = num_edges * sizeof (struct edge_def);
2571 total += size;
2572 fprintf (file, fmt_str_1, "Edges", num_edges, SCALE (size), LABEL (size));
2574 size = n_basic_blocks * sizeof (struct bb_ann_d);
2575 total += size;
2576 fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks,
2577 SCALE (size), LABEL (size));
2579 fprintf (file, "---------------------------------------------------------\n");
2580 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2581 LABEL (total));
2582 fprintf (file, "---------------------------------------------------------\n");
2583 fprintf (file, "\n");
2585 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2586 max_num_merged_labels = cfg_stats.num_merged_labels;
2588 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2589 cfg_stats.num_merged_labels, max_num_merged_labels);
2591 fprintf (file, "\n");
2595 /* Dump CFG statistics on stderr. Keep extern so that it's always
2596 linked in the final executable. */
2598 void
2599 debug_cfg_stats (void)
2601 dump_cfg_stats (stderr);
2605 /* Dump the flowgraph to a .vcg FILE. */
2607 static void
2608 tree_cfg2vcg (FILE *file)
2610 edge e;
2611 edge_iterator ei;
2612 basic_block bb;
2613 const char *funcname
2614 = lang_hooks.decl_printable_name (current_function_decl, 2);
2616 /* Write the file header. */
2617 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2618 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2619 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2621 /* Write blocks and edges. */
2622 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2624 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2625 e->dest->index);
2627 if (e->flags & EDGE_FAKE)
2628 fprintf (file, " linestyle: dotted priority: 10");
2629 else
2630 fprintf (file, " linestyle: solid priority: 100");
2632 fprintf (file, " }\n");
2634 fputc ('\n', file);
2636 FOR_EACH_BB (bb)
2638 enum tree_code head_code, end_code;
2639 const char *head_name, *end_name;
2640 int head_line = 0;
2641 int end_line = 0;
2642 tree first = first_stmt (bb);
2643 tree last = last_stmt (bb);
2645 if (first)
2647 head_code = TREE_CODE (first);
2648 head_name = tree_code_name[head_code];
2649 head_line = get_lineno (first);
2651 else
2652 head_name = "no-statement";
2654 if (last)
2656 end_code = TREE_CODE (last);
2657 end_name = tree_code_name[end_code];
2658 end_line = get_lineno (last);
2660 else
2661 end_name = "no-statement";
2663 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2664 bb->index, bb->index, head_name, head_line, end_name,
2665 end_line);
2667 FOR_EACH_EDGE (e, ei, bb->succs)
2669 if (e->dest == EXIT_BLOCK_PTR)
2670 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2671 else
2672 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2674 if (e->flags & EDGE_FAKE)
2675 fprintf (file, " priority: 10 linestyle: dotted");
2676 else
2677 fprintf (file, " priority: 100 linestyle: solid");
2679 fprintf (file, " }\n");
2682 if (bb->next_bb != EXIT_BLOCK_PTR)
2683 fputc ('\n', file);
2686 fputs ("}\n\n", file);
2691 /*---------------------------------------------------------------------------
2692 Miscellaneous helpers
2693 ---------------------------------------------------------------------------*/
2695 /* Return true if T represents a stmt that always transfers control. */
2697 bool
2698 is_ctrl_stmt (tree t)
2700 return (TREE_CODE (t) == COND_EXPR
2701 || TREE_CODE (t) == SWITCH_EXPR
2702 || TREE_CODE (t) == GOTO_EXPR
2703 || TREE_CODE (t) == RETURN_EXPR
2704 || TREE_CODE (t) == RESX_EXPR);
2708 /* Return true if T is a statement that may alter the flow of control
2709 (e.g., a call to a non-returning function). */
2711 bool
2712 is_ctrl_altering_stmt (tree t)
2714 tree call;
2716 gcc_assert (t);
2717 call = get_call_expr_in (t);
2718 if (call)
2720 /* A non-pure/const CALL_EXPR alters flow control if the current
2721 function has nonlocal labels. */
2722 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2723 return true;
2725 /* A CALL_EXPR also alters control flow if it does not return. */
2726 if (call_expr_flags (call) & ECF_NORETURN)
2727 return true;
2730 /* If a statement can throw, it alters control flow. */
2731 return tree_can_throw_internal (t);
2735 /* Return true if T is a computed goto. */
2737 bool
2738 computed_goto_p (tree t)
2740 return (TREE_CODE (t) == GOTO_EXPR
2741 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2745 /* Checks whether EXPR is a simple local goto. */
2747 bool
2748 simple_goto_p (tree expr)
2750 return (TREE_CODE (expr) == GOTO_EXPR
2751 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2755 /* Return true if T should start a new basic block. PREV_T is the
2756 statement preceding T. It is used when T is a label or a case label.
2757 Labels should only start a new basic block if their previous statement
2758 wasn't a label. Otherwise, a sequence of labels would generate
2759 unnecessary basic blocks that contain only a single label. */
2761 static inline bool
2762 stmt_starts_bb_p (tree t, tree prev_t)
2764 if (t == NULL_TREE)
2765 return false;
2767 /* LABEL_EXPRs start a new basic block only if the preceding
2768 statement wasn't a label of the same type. This prevents the
2769 creation of consecutive blocks that have nothing but a single
2770 label. */
2771 if (TREE_CODE (t) == LABEL_EXPR)
2773 /* Nonlocal and computed GOTO targets always start a new block. */
2774 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2775 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2776 return true;
2778 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2780 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2781 return true;
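/* Two adjacent ordinary labels share one basic block; record the
   merge for the CFG statistics dump. */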
2783 cfg_stats.num_merged_labels++;
2784 return false;
2786 else
2787 return true;
2790 return false;
2794 /* Return true if T should end a basic block. */
2796 bool
2797 stmt_ends_bb_p (tree t)
2799 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2803 /* Add gotos that used to be represented implicitly in the CFG. */
2805 void
2806 disband_implicit_edges (void)
2808 basic_block bb;
2809 block_stmt_iterator last;
2810 edge e;
2811 edge_iterator ei;
2812 tree stmt, label;
2814 FOR_EACH_BB (bb)
2816 last = bsi_last (bb);
2817 stmt = last_stmt (bb);
2819 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2821 /* Remove superfluous gotos from COND_EXPR branches. Moved
2822 from cfg_remove_useless_stmts here since it violates the
2823 invariants for tree-CFG correspondence and thus fits better
2824 here where we do it anyway. */
2825 e = find_edge (bb, bb->next_bb);
2826 if (e)
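/* The arm that branches to the next block in layout order needs no
   explicit goto; empty it and mark its edge as a fallthrough. */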
2828 if (e->flags & EDGE_TRUE_VALUE)
2829 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2830 else if (e->flags & EDGE_FALSE_VALUE)
2831 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2832 else
2833 gcc_unreachable ();
2834 e->flags |= EDGE_FALLTHRU;
2837 continue;
2840 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2842 /* Remove the RETURN_EXPR if we may fall through to the exit
2843 instead. */
2844 gcc_assert (single_succ_p (bb));
2845 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2847 if (bb->next_bb == EXIT_BLOCK_PTR
2848 && !TREE_OPERAND (stmt, 0))
2850 bsi_remove (&last);
2851 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2853 continue;
2856 /* There can be no fallthru edge if the last statement is a control
2857 one. */
2858 if (stmt && is_ctrl_stmt (stmt))
2859 continue;
2861 /* Find a fallthru edge and emit the goto if necessary. */
2862 FOR_EACH_EDGE (e, ei, bb->succs)
2863 if (e->flags & EDGE_FALLTHRU)
2864 break;
2866 if (!e || e->dest == bb->next_bb)
2867 continue;
2869 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2870 label = tree_block_label (e->dest);
2872 stmt = build1 (GOTO_EXPR, void_type_node, label);
2873 #ifdef USE_MAPPED_LOCATION
2874 SET_EXPR_LOCATION (stmt, e->goto_locus);
2875 #else
2876 SET_EXPR_LOCUS (stmt, e->goto_locus);
2877 #endif
2878 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2879 e->flags &= ~EDGE_FALLTHRU;
2883 /* Remove block annotations and other data structures. */
2885 void
2886 delete_tree_cfg_annotations (void)
2888 basic_block bb;
2889 if (n_basic_blocks > 0)
2890 free_blocks_annotations ();
2892 label_to_block_map = NULL;
2893 FOR_EACH_BB (bb)
2894 bb->rbi = NULL;
2898 /* Return the first statement in basic block BB. */
2900 tree
2901 first_stmt (basic_block bb)
2903 block_stmt_iterator i = bsi_start (bb);
2904 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2908 /* Return the last statement in basic block BB. */
2910 tree
2911 last_stmt (basic_block bb)
2913 block_stmt_iterator b = bsi_last (bb);
2914 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2918 /* Return a pointer to the last statement in block BB. */
2920 tree *
2921 last_stmt_ptr (basic_block bb)
2923 block_stmt_iterator last = bsi_last (bb);
2924 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2928 /* Return the last statement of an otherwise empty block. Return NULL
2929 if the block is totally empty, or if it contains more than one
2930 statement. */
2932 tree
2933 last_and_only_stmt (basic_block bb)
2935 block_stmt_iterator i = bsi_last (bb);
2936 tree last, prev;
2938 if (bsi_end_p (i))
2939 return NULL_TREE;
2941 last = bsi_stmt (i);
2942 bsi_prev (&i);
2943 if (bsi_end_p (i))
2944 return last;
2946 /* Empty statements should no longer appear in the instruction stream.
2947 Everything that might have appeared before should be deleted by
2948 remove_useless_stmts, and the optimizers should just bsi_remove
2949 instead of smashing with build_empty_stmt.
2951 Thus the only thing that should appear here in a block containing
2952 one executable statement is a label. */
2953 prev = bsi_stmt (i);
2954 if (TREE_CODE (prev) == LABEL_EXPR)
2955 return last;
2956 else
2957 return NULL_TREE;
2961 /* Mark BB as the basic block holding statement T. */
2963 void
2964 set_bb_for_stmt (tree t, basic_block bb)
2966 if (TREE_CODE (t) == PHI_NODE)
2967 PHI_BB (t) = bb;
2968 else if (TREE_CODE (t) == STATEMENT_LIST)
2970 tree_stmt_iterator i;
2971 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2972 set_bb_for_stmt (tsi_stmt (i), bb);
2974 else
2976 stmt_ann_t ann = get_stmt_ann (t);
2977 ann->bb = bb;
2979 /* If the statement is a label, add the label to block-to-labels map
2980 so that we can speed up edge creation for GOTO_EXPRs. */
2981 if (TREE_CODE (t) == LABEL_EXPR)
2983 int uid;
2985 t = LABEL_EXPR_LABEL (t);
2986 uid = LABEL_DECL_UID (t);
2987 if (uid == -1)
2989 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2990 if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
2991 VARRAY_GROW (label_to_block_map, 3 * uid / 2);
2993 else
2994 /* We're moving an existing label. Make sure that we've
2995 removed it from the old block. */
2996 gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
2997 VARRAY_BB (label_to_block_map, uid) = bb;
3002 /* Finds iterator for STMT. */
3004 extern block_stmt_iterator
3005 bsi_for_stmt (tree stmt)
3007 block_stmt_iterator bsi;
3009 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
3010 if (bsi_stmt (bsi) == stmt)
3011 return bsi;
3013 gcc_unreachable ();
3016 /* Mark statement T as modified, and update it. */
3017 static inline void
3018 update_modified_stmts (tree t)
3020 if (TREE_CODE (t) == STATEMENT_LIST)
3022 tree_stmt_iterator i;
3023 tree stmt;
3024 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
3026 stmt = tsi_stmt (i);
3027 update_stmt_if_modified (stmt);
3030 else
3031 update_stmt_if_modified (t);
3034 /* Insert statement (or statement list) T before the statement
3035 pointed-to by iterator I. M specifies how to update iterator I
3036 after insertion (see enum bsi_iterator_update). */
3038 void
3039 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
3041 set_bb_for_stmt (t, i->bb);
3042 update_modified_stmts (t);
3043 tsi_link_before (&i->tsi, t, m);
3047 /* Insert statement (or statement list) T after the statement
3048 pointed-to by iterator I. M specifies how to update iterator I
3049 after insertion (see enum bsi_iterator_update). */
3051 void
3052 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
3054 set_bb_for_stmt (t, i->bb);
3055 update_modified_stmts (t);
3056 tsi_link_after (&i->tsi, t, m);
3060 /* Remove the statement pointed to by iterator I. The iterator is updated
3061 to the next statement. */
3063 void
3064 bsi_remove (block_stmt_iterator *i)
3066 tree t = bsi_stmt (*i);
3067 set_bb_for_stmt (t, NULL);
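/* Unlink T's operands from the immediate-use chains so that no
   dangling use records refer to the removed statement. */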
3068 delink_stmt_imm_use (t);
3069 tsi_delink (&i->tsi);
3070 mark_stmt_modified (t);
3074 /* Move the statement at FROM so it comes right after the statement at TO. */
3076 void
3077 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
3079 tree stmt = bsi_stmt (*from);
3080 bsi_remove (from);
3081 bsi_insert_after (to, stmt, BSI_SAME_STMT);
3085 /* Move the statement at FROM so it comes right before the statement at TO. */
3087 void
3088 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
3090 tree stmt = bsi_stmt (*from);
3091 bsi_remove (from);
3092 bsi_insert_before (to, stmt, BSI_SAME_STMT);
3096 /* Move the statement at FROM to the end of basic block BB. */
3098 void
3099 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
3101 block_stmt_iterator last = bsi_last (bb);
3103 /* Have to check bsi_end_p because it could be an empty block. */
3104 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
3105 bsi_move_before (from, &last);
3106 else
3107 bsi_move_after (from, &last);
3111 /* Replace the contents of the statement pointed to by iterator BSI
3112 with STMT. If PRESERVE_EH_INFO is true, the exception handling
3113 information of the original statement is preserved. */
3115 void
3116 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
3118 int eh_region;
3119 tree orig_stmt = bsi_stmt (*bsi);
3121 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
3122 set_bb_for_stmt (stmt, bsi->bb);
3124 /* Preserve EH region information from the original statement, if
3125 requested by the caller. */
3126 if (preserve_eh_info)
3128 eh_region = lookup_stmt_eh_region (orig_stmt);
3129 if (eh_region >= 0)
3130 add_stmt_to_eh_region (stmt, eh_region);
3133 *bsi_stmt_ptr (*bsi) = stmt;
3134 mark_stmt_modified (stmt);
3135 update_modified_stmts (stmt);
3139 /* Insert the statement pointed-to by BSI into edge E. Every attempt
3140 is made to place the statement in an existing basic block, but
3141 sometimes that isn't possible. When it isn't possible, the edge is
3142 split and the statement is added to the new block.
3144 In all cases, the returned *BSI points to the correct location. The
3145 return value is true if insertion should be done after the location,
3146 or false if it should be done before the location. If new basic block
3147 has to be created, it is stored in *NEW_BB. */
3149 static bool
3150 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
3151 basic_block *new_bb)
3153 basic_block dest, src;
3154 tree tmp;
3156 dest = e->dest;
3157 restart:
3159 /* If the destination has one predecessor which has no PHI nodes,
3160 insert there. Except for the exit block.
3162 The requirement for no PHI nodes could be relaxed. Basically we
3163 would have to examine the PHIs to prove that none of them used
3164 the value set by the statement we want to insert on E. That
3165 hardly seems worth the effort. */
3166 if (single_pred_p (dest)
3167 && ! phi_nodes (dest)
3168 && dest != EXIT_BLOCK_PTR)
3170 *bsi = bsi_start (dest);
3171 if (bsi_end_p (*bsi))
3172 return true;
3174 /* Make sure we insert after any leading labels. */
3175 tmp = bsi_stmt (*bsi);
3176 while (TREE_CODE (tmp) == LABEL_EXPR)
3178 bsi_next (bsi);
3179 if (bsi_end_p (*bsi))
3180 break;
3181 tmp = bsi_stmt (*bsi);
3184 if (bsi_end_p (*bsi))
3186 *bsi = bsi_last (dest);
3187 return true;
3189 else
3190 return false;
3193 /* If the source has one successor, the edge is not abnormal and
3194 the last statement does not end a basic block, insert there.
3195 Except for the entry block. */
3196 src = e->src;
3197 if ((e->flags & EDGE_ABNORMAL) == 0
3198 && single_succ_p (src)
3199 && src != ENTRY_BLOCK_PTR)
3201 *bsi = bsi_last (src);
3202 if (bsi_end_p (*bsi))
3203 return true;
3205 tmp = bsi_stmt (*bsi);
3206 if (!stmt_ends_bb_p (tmp))
3207 return true;
3209 /* Insert code just before returning the value. We may need to decompose
3210 the return in case it contains a non-trivial operand. */
3211 if (TREE_CODE (tmp) == RETURN_EXPR)
3213 tree op = TREE_OPERAND (tmp, 0);
3214 if (!is_gimple_val (op))
3216 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3217 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3218 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3220 bsi_prev (bsi);
3221 return true;
3225 /* Otherwise, create a new basic block, and split this edge. */
3226 dest = split_edge (e);
3227 if (new_bb)
3228 *new_bb = dest;
3229 e = single_pred_edge (dest);
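/* The block created by split_edge is empty, has a single predecessor
   and no PHI nodes, so retrying from the top will insert into it
   directly. */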
3230 goto restart;
3234 /* This routine will commit all pending edge insertions, creating any new
3235 basic blocks which are necessary. */
3237 void
3238 bsi_commit_edge_inserts (void)
3240 basic_block bb;
3241 edge e;
3242 edge_iterator ei;
3244 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3246 FOR_EACH_BB (bb)
3247 FOR_EACH_EDGE (e, ei, bb->succs)
3248 bsi_commit_one_edge_insert (e, NULL);
3252 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3253 to this block, otherwise set it to NULL. */
3255 void
3256 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3258 if (new_bb)
3259 *new_bb = NULL;
3260 if (PENDING_STMT (e))
3262 block_stmt_iterator bsi;
3263 tree stmt = PENDING_STMT (e);
3265 PENDING_STMT (e) = NULL_TREE;
3267 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3268 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3269 else
3270 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3275 /* Add STMT to the pending list of edge E. No actual insertion is
3276 made until a call to bsi_commit_edge_inserts () is made. */
3278 void
3279 bsi_insert_on_edge (edge e, tree stmt)
3281 append_to_statement_list (stmt, &PENDING_STMT (e));
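/* A minimal usage sketch (hypothetical statement STMT and edge E):

     bsi_insert_on_edge (e, stmt);
     ...
     bsi_commit_edge_inserts ();

   The statement is only queued on PENDING_STMT (E) here; it is placed
   into a real block, possibly splitting E, when the pending insertions
   are committed. */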
3284 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3285 block has to be created, it is returned. */
3287 basic_block
3288 bsi_insert_on_edge_immediate (edge e, tree stmt)
3290 block_stmt_iterator bsi;
3291 basic_block new_bb = NULL;
3293 gcc_assert (!PENDING_STMT (e));
3295 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3296 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3297 else
3298 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3300 return new_bb;
3303 /*---------------------------------------------------------------------------
3304 Tree specific functions for CFG manipulation
3305 ---------------------------------------------------------------------------*/
3307 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3309 static void
3310 reinstall_phi_args (edge new_edge, edge old_edge)
3312 tree var, phi;
3314 if (!PENDING_STMT (old_edge))
3315 return;
3317 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3318 var && phi;
3319 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3321 tree result = TREE_PURPOSE (var);
3322 tree arg = TREE_VALUE (var);
3324 gcc_assert (result == PHI_RESULT (phi));
3326 add_phi_arg (phi, arg, new_edge);
3329 PENDING_STMT (old_edge) = NULL;
3332 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3333 Abort on abnormal edges. */
3335 static basic_block
3336 tree_split_edge (edge edge_in)
3338 basic_block new_bb, after_bb, dest, src;
3339 edge new_edge, e;
3341 /* Abnormal edges cannot be split. */
3342 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3344 src = edge_in->src;
3345 dest = edge_in->dest;
3347 /* Place the new block in the block list. Try to keep the new block
3348 near its "logical" location. This is of most help to humans looking
3349 at debugging dumps. */
3350 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3351 after_bb = edge_in->src;
3352 else
3353 after_bb = dest->prev_bb;
3355 new_bb = create_empty_bb (after_bb);
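/* The new block sits on every path that used EDGE_IN, so it inherits
   EDGE_IN's frequency and count, and its single outgoing edge is
   always taken (probability REG_BR_PROB_BASE). */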
3356 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3357 new_bb->count = edge_in->count;
3358 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3359 new_edge->probability = REG_BR_PROB_BASE;
3360 new_edge->count = edge_in->count;
3362 e = redirect_edge_and_branch (edge_in, new_bb);
3363 gcc_assert (e);
3364 reinstall_phi_args (new_edge, e);
3366 return new_bb;
3370 /* Return true when BB has label LABEL in it. */
3372 static bool
3373 has_label_p (basic_block bb, tree label)
3375 block_stmt_iterator bsi;
3377 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3379 tree stmt = bsi_stmt (bsi);
3381 if (TREE_CODE (stmt) != LABEL_EXPR)
3382 return false;
3383 if (LABEL_EXPR_LABEL (stmt) == label)
3384 return true;
3386 return false;
3390 /* Callback for walk_tree, check that all elements with address taken are
3391 properly noticed as such. DATA is non-null if and only if TP was
3392 seen inside a PHI node. */
3394 static tree
3395 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3397 tree t = *tp, x;
3398 bool in_phi = (data != NULL);
3400 if (TYPE_P (t))
3401 *walk_subtrees = 0;
3403 /* Check operand N for being valid GIMPLE and give error MSG if not.
3404 We check for constants explicitly since they are not considered
3405 gimple invariants if they overflowed. */
3406 #define CHECK_OP(N, MSG) \
3407 do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N)) \
3408 && !is_gimple_val (TREE_OPERAND (t, N))) \
3409 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3411 switch (TREE_CODE (t))
3413 case SSA_NAME:
3414 if (SSA_NAME_IN_FREE_LIST (t))
3416 error ("SSA name in freelist but still referenced");
3417 return *tp;
3419 break;
3421 case ASSERT_EXPR:
3422 x = fold (ASSERT_EXPR_COND (t));
3423 if (x == boolean_false_node)
3425 error ("ASSERT_EXPR with an always-false condition");
3426 return *tp;
3428 break;
3430 case MODIFY_EXPR:
3431 x = TREE_OPERAND (t, 0);
3432 if (TREE_CODE (x) == BIT_FIELD_REF
3433 && is_gimple_reg (TREE_OPERAND (x, 0)))
3435 error ("GIMPLE register modified with BIT_FIELD_REF");
3436 return t;
3438 break;
3440 case ADDR_EXPR:
3441 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3442 dead PHIs that take the address of something. But if the PHI
3443 result is dead, the fact that it takes the address of anything
3444 is irrelevant. Because we cannot tell from here if a PHI result
3445 is dead, we just skip this check for PHIs altogether. This means
3446 we may be missing "valid" checks, but what can you do?
3447 This was PR19217. */
3448 if (in_phi)
3449 break;
3451 /* Skip any references (they will be checked when we recurse down the
3452 tree) and ensure that any variable used as a prefix is marked
3453 addressable. */
3454 for (x = TREE_OPERAND (t, 0);
3455 handled_component_p (x);
3456 x = TREE_OPERAND (x, 0))
3459 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3460 return NULL;
3461 if (!TREE_ADDRESSABLE (x))
3463 error ("address taken, but ADDRESSABLE bit not set");
3464 return x;
3466 break;
3468 case COND_EXPR:
3469 x = COND_EXPR_COND (t);
3470 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3472 error ("non-boolean used in condition");
3473 return x;
3475 break;
3477 case NOP_EXPR:
3478 case CONVERT_EXPR:
3479 case FIX_TRUNC_EXPR:
3480 case FIX_CEIL_EXPR:
3481 case FIX_FLOOR_EXPR:
3482 case FIX_ROUND_EXPR:
3483 case FLOAT_EXPR:
3484 case NEGATE_EXPR:
3485 case ABS_EXPR:
3486 case BIT_NOT_EXPR:
3487 case NON_LVALUE_EXPR:
3488 case TRUTH_NOT_EXPR:
3489 CHECK_OP (0, "Invalid operand to unary operator");
3490 break;
3492 case REALPART_EXPR:
3493 case IMAGPART_EXPR:
3494 case COMPONENT_REF:
3495 case ARRAY_REF:
3496 case ARRAY_RANGE_REF:
3497 case BIT_FIELD_REF:
3498 case VIEW_CONVERT_EXPR:
3499 /* We have a nest of references. Verify that each of the operands
3500 that determine where to reference is either a constant or a variable,
3501 verify that the base is valid, and then show we've already checked
3502 the subtrees. */
3503 while (handled_component_p (t))
3505 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3506 CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
3507 else if (TREE_CODE (t) == ARRAY_REF
3508 || TREE_CODE (t) == ARRAY_RANGE_REF)
3510 CHECK_OP (1, "Invalid array index.");
3511 if (TREE_OPERAND (t, 2))
3512 CHECK_OP (2, "Invalid array lower bound.");
3513 if (TREE_OPERAND (t, 3))
3514 CHECK_OP (3, "Invalid array stride.");
3516 else if (TREE_CODE (t) == BIT_FIELD_REF)
3518 CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
3519 CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
3522 t = TREE_OPERAND (t, 0);
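/* After stripping the handled components, T is the base object of the
   reference; it must itself be a constant or a valid GIMPLE lvalue. */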
3525 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3527 error ("Invalid reference prefix.");
3528 return t;
3530 *walk_subtrees = 0;
3531 break;
3533 case LT_EXPR:
3534 case LE_EXPR:
3535 case GT_EXPR:
3536 case GE_EXPR:
3537 case EQ_EXPR:
3538 case NE_EXPR:
3539 case UNORDERED_EXPR:
3540 case ORDERED_EXPR:
3541 case UNLT_EXPR:
3542 case UNLE_EXPR:
3543 case UNGT_EXPR:
3544 case UNGE_EXPR:
3545 case UNEQ_EXPR:
3546 case LTGT_EXPR:
3547 case PLUS_EXPR:
3548 case MINUS_EXPR:
3549 case MULT_EXPR:
3550 case TRUNC_DIV_EXPR:
3551 case CEIL_DIV_EXPR:
3552 case FLOOR_DIV_EXPR:
3553 case ROUND_DIV_EXPR:
3554 case TRUNC_MOD_EXPR:
3555 case CEIL_MOD_EXPR:
3556 case FLOOR_MOD_EXPR:
3557 case ROUND_MOD_EXPR:
3558 case RDIV_EXPR:
3559 case EXACT_DIV_EXPR:
3560 case MIN_EXPR:
3561 case MAX_EXPR:
3562 case LSHIFT_EXPR:
3563 case RSHIFT_EXPR:
3564 case LROTATE_EXPR:
3565 case RROTATE_EXPR:
3566 case BIT_IOR_EXPR:
3567 case BIT_XOR_EXPR:
3568 case BIT_AND_EXPR:
3569 CHECK_OP (0, "Invalid operand to binary operator");
3570 CHECK_OP (1, "Invalid operand to binary operator");
3571 break;
3573 default:
3574 break;
3576 return NULL;
3578 #undef CHECK_OP
3582 /* Verify STMT, return true if STMT is not in GIMPLE form.
3583 TODO: Implement type checking. */
3585 static bool
3586 verify_stmt (tree stmt, bool last_in_block)
3588 tree addr;
3590 if (!is_gimple_stmt (stmt))
3592 error ("Is not a valid GIMPLE statement.");
3593 goto fail;
3596 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3597 if (addr)
3599 debug_generic_stmt (addr);
3600 return true;
3603 /* If the statement is marked as part of an EH region, then it is
3604 expected that the statement could throw. Verify that when we
3605 have optimizations that simplify statements such that we prove
3606 that they cannot throw, that we update other data structures
3607 to match. */
3608 if (lookup_stmt_eh_region (stmt) >= 0)
3610 if (!tree_could_throw_p (stmt))
3612 error ("Statement marked for throw, but doesn%'t.");
3613 goto fail;
3615 if (!last_in_block && tree_can_throw_internal (stmt))
3617 error ("Statement marked for throw in middle of block.");
3618 goto fail;
3622 return false;
3624 fail:
3625 debug_generic_stmt (stmt);
3626 return true;
3630 /* Return true when the T can be shared. */
3632 static bool
3633 tree_node_can_be_shared (tree t)
3635 if (IS_TYPE_OR_DECL_P (t)
3636 /* We check for constants explicitly since they are not considered
3637 gimple invariants if they overflowed. */
3638 || CONSTANT_CLASS_P (t)
3639 || is_gimple_min_invariant (t)
3640 || TREE_CODE (t) == SSA_NAME
3641 || t == error_mark_node)
3642 return true;
3644 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3645 return true;
3647 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3648 /* We check for constants explicitly since they are not considered
3649 gimple invariants if they overflowed. */
3650 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3651 || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3652 || (TREE_CODE (t) == COMPONENT_REF
3653 || TREE_CODE (t) == REALPART_EXPR
3654 || TREE_CODE (t) == IMAGPART_EXPR))
3655 t = TREE_OPERAND (t, 0);
3657 if (DECL_P (t))
3658 return true;
3660 return false;
3664 /* Called via walk_trees. Verify tree sharing. */
3666 static tree
3667 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3669 htab_t htab = (htab_t) data;
3670 void **slot;
3672 if (tree_node_can_be_shared (*tp))
3674 *walk_subtrees = false;
3675 return NULL;
3678 slot = htab_find_slot (htab, *tp, INSERT);
3679 if (*slot)
3680 return *slot;
3681 *slot = *tp;
3683 return NULL;
3687 /* Verify the GIMPLE statement chain. */
3689 void
3690 verify_stmts (void)
3692 basic_block bb;
3693 block_stmt_iterator bsi;
3694 bool err = false;
3695 htab_t htab;
3696 tree addr;
3698 timevar_push (TV_TREE_STMT_VERIFY);
3699 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
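/* The pointer hash table records every tree node visited; reaching a
   node that is already in the table means it is shared between
   statements, which verify_node_sharing reports as an error. */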
3701 FOR_EACH_BB (bb)
3703 tree phi;
3704 int i;
3706 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3708 int phi_num_args = PHI_NUM_ARGS (phi);
3710 if (bb_for_stmt (phi) != bb)
3712 error ("bb_for_stmt (phi) is set to a wrong basic block\n");
3713 err |= true;
3716 for (i = 0; i < phi_num_args; i++)
3718 tree t = PHI_ARG_DEF (phi, i);
3719 tree addr;
3721 /* Addressable variables do have SSA_NAMEs but they
3722 are not considered gimple values. */
3723 if (TREE_CODE (t) != SSA_NAME
3724 && TREE_CODE (t) != FUNCTION_DECL
3725 && !is_gimple_val (t))
3727 error ("PHI def is not a GIMPLE value");
3728 debug_generic_stmt (phi);
3729 debug_generic_stmt (t);
3730 err |= true;
3733 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3734 if (addr)
3736 debug_generic_stmt (addr);
3737 err |= true;
3740 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3741 if (addr)
3743 error ("Incorrect sharing of tree nodes");
3744 debug_generic_stmt (phi);
3745 debug_generic_stmt (addr);
3746 err |= true;
3751 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3753 tree stmt = bsi_stmt (bsi);
3755 if (bb_for_stmt (stmt) != bb)
3757 error ("bb_for_stmt (stmt) is set to a wrong basic block\n");
3758 err |= true;
3761 bsi_next (&bsi);
3762 err |= verify_stmt (stmt, bsi_end_p (bsi));
3763 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3764 if (addr)
3766 error ("Incorrect sharing of tree nodes");
3767 debug_generic_stmt (stmt);
3768 debug_generic_stmt (addr);
3769 err |= true;
3774 if (err)
3775 internal_error ("verify_stmts failed.");
3777 htab_delete (htab);
3778 timevar_pop (TV_TREE_STMT_VERIFY);
3782 /* Verifies that the flow information is OK. */
3784 static int
3785 tree_verify_flow_info (void)
3787 int err = 0;
3788 basic_block bb;
3789 block_stmt_iterator bsi;
3790 tree stmt;
3791 edge e;
3792 edge_iterator ei;
3794 if (ENTRY_BLOCK_PTR->stmt_list)
3796 error ("ENTRY_BLOCK has a statement list associated with it\n");
3797 err = 1;
3800 if (EXIT_BLOCK_PTR->stmt_list)
3802 error ("EXIT_BLOCK has a statement list associated with it\n");
3803 err = 1;
3806 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3807 if (e->flags & EDGE_FALLTHRU)
3809 error ("Fallthru to exit from bb %d\n", e->src->index);
3810 err = 1;
3813 FOR_EACH_BB (bb)
3815 bool found_ctrl_stmt = false;
3817 stmt = NULL_TREE;
3819 /* Skip labels on the start of basic block. */
3820 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3822 tree prev_stmt = stmt;
3824 stmt = bsi_stmt (bsi);
3826 if (TREE_CODE (stmt) != LABEL_EXPR)
3827 break;
3829 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3831 error ("Nonlocal label %s is not first "
3832 "in a sequence of labels in bb %d",
3833 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3834 bb->index);
3835 err = 1;
3838 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3840 error ("Label %s to block does not match in bb %d\n",
3841 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3842 bb->index);
3843 err = 1;
3846 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3847 != current_function_decl)
3849 error ("Label %s has incorrect context in bb %d\n",
3850 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3851 bb->index);
3852 err = 1;
3856 /* Verify that body of basic block BB is free of control flow. */
3857 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3859 tree stmt = bsi_stmt (bsi);
3861 if (found_ctrl_stmt)
3863 error ("Control flow in the middle of basic block %d\n",
3864 bb->index);
3865 err = 1;
3868 if (stmt_ends_bb_p (stmt))
3869 found_ctrl_stmt = true;
3871 if (TREE_CODE (stmt) == LABEL_EXPR)
3873 error ("Label %s in the middle of basic block %d\n",
3874 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3875 bb->index);
3876 err = 1;
3879 bsi = bsi_last (bb);
3880 if (bsi_end_p (bsi))
3881 continue;
3883 stmt = bsi_stmt (bsi);
3885 if (is_ctrl_stmt (stmt))
3887 FOR_EACH_EDGE (e, ei, bb->succs)
3888 if (e->flags & EDGE_FALLTHRU)
3890 error ("Fallthru edge after a control statement in bb %d \n",
3891 bb->index);
3892 err = 1;
3896 switch (TREE_CODE (stmt))
3898 case COND_EXPR:
3900 edge true_edge;
3901 edge false_edge;
3902 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3903 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3905 error ("Structured COND_EXPR at the end of bb %d\n", bb->index);
3906 err = 1;
3909 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3911 if (!true_edge || !false_edge
3912 || !(true_edge->flags & EDGE_TRUE_VALUE)
3913 || !(false_edge->flags & EDGE_FALSE_VALUE)
3914 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3915 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3916 || EDGE_COUNT (bb->succs) >= 3)
3918 error ("Wrong outgoing edge flags at end of bb %d\n",
3919 bb->index);
3920 err = 1;
3923 if (!has_label_p (true_edge->dest,
3924 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3926 error ("%<then%> label does not match edge at end of bb %d\n",
3927 bb->index);
3928 err = 1;
3931 if (!has_label_p (false_edge->dest,
3932 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3934 error ("%<else%> label does not match edge at end of bb %d\n",
3935 bb->index);
3936 err = 1;
3939 break;
3941 case GOTO_EXPR:
3942 if (simple_goto_p (stmt))
3944 error ("Explicit goto at end of bb %d\n", bb->index);
3945 err = 1;
3947 else
3949 /* FIXME. We should double check that the labels in the
3950 destination blocks have their address taken. */
3951 FOR_EACH_EDGE (e, ei, bb->succs)
3952 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3953 | EDGE_FALSE_VALUE))
3954 || !(e->flags & EDGE_ABNORMAL))
3956 error ("Wrong outgoing edge flags at end of bb %d\n",
3957 bb->index);
3958 err = 1;
3961 break;
3963 case RETURN_EXPR:
3964 if (!single_succ_p (bb)
3965 || (single_succ_edge (bb)->flags
3966 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3967 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3969 error ("Wrong outgoing edge flags at end of bb %d\n", bb->index);
3970 err = 1;
3972 if (single_succ (bb) != EXIT_BLOCK_PTR)
3974 error ("Return edge does not point to exit in bb %d\n",
3975 bb->index);
3976 err = 1;
3978 break;
3980 case SWITCH_EXPR:
3982 tree prev;
3983 edge e;
3984 size_t i, n;
3985 tree vec;
3987 vec = SWITCH_LABELS (stmt);
3988 n = TREE_VEC_LENGTH (vec);
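/* The aux field encodes the checking protocol below: destinations of
   case labels are first marked with 1, the edge walk upgrades reached
   blocks to 2, and the final pass verifies that every labelled block
   was reached before clearing aux again. */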
3990 /* Mark all the destination basic blocks. */
3991 for (i = 0; i < n; ++i)
3993 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3994 basic_block label_bb = label_to_block (lab);
3996 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3997 label_bb->aux = (void *)1;
4000 /* Verify that the case labels are sorted. */
4001 prev = TREE_VEC_ELT (vec, 0);
4002 for (i = 1; i < n - 1; ++i)
4004 tree c = TREE_VEC_ELT (vec, i);
4005 if (! CASE_LOW (c))
4007 error ("Found default case not at end of case vector");
4008 err = 1;
4009 continue;
4011 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4013 error ("Case labels not sorted:\n ");
4014 print_generic_expr (stderr, prev, 0);
4015 fprintf (stderr," is greater than ");
4016 print_generic_expr (stderr, c, 0);
4017 fprintf (stderr," but comes before it.\n");
4018 err = 1;
4020 prev = c;
4022 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
4024 error ("No default case found at end of case vector");
4025 err = 1;
4028 FOR_EACH_EDGE (e, ei, bb->succs)
4030 if (!e->dest->aux)
4032 error ("Extra outgoing edge %d->%d\n",
4033 bb->index, e->dest->index);
4034 err = 1;
4036 e->dest->aux = (void *)2;
4037 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4038 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4040 error ("Wrong outgoing edge flags at end of bb %d\n",
4041 bb->index);
4042 err = 1;
4046 /* Check that we have all of them. */
4047 for (i = 0; i < n; ++i)
4049 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4050 basic_block label_bb = label_to_block (lab);
4052 if (label_bb->aux != (void *)2)
4054 error ("Missing edge %i->%i",
4055 bb->index, label_bb->index);
4056 err = 1;
4060 FOR_EACH_EDGE (e, ei, bb->succs)
4061 e->dest->aux = (void *)0;
4064 default: ;
4068 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
4069 verify_dominators (CDI_DOMINATORS);
4071 return err;
4075 /* Updates phi nodes after creating a forwarder block joined
4076 by edge FALLTHRU. */
4078 static void
4079 tree_make_forwarder_block (edge fallthru)
4081 edge e;
4082 edge_iterator ei;
4083 basic_block dummy, bb;
4084 tree phi, new_phi, var;
4086 dummy = fallthru->src;
4087 bb = fallthru->dest;
4089 if (single_pred_p (bb))
4090 return;
4092 /* If we redirected a branch we must create new phi nodes at the
4093 start of BB. */
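/* Each PHI in DUMMY is given a fresh SSA name as its result; the new
   PHI in BB takes over the original name and receives DUMMY's value
   along the FALLTHRU edge. */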
4094 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
4096 var = PHI_RESULT (phi);
4097 new_phi = create_phi_node (var, bb);
4098 SSA_NAME_DEF_STMT (var) = new_phi;
4099 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4100 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4103 /* Ensure that the PHI node chain is in the same order. */
4104 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4106 /* Add the arguments we have stored on edges. */
4107 FOR_EACH_EDGE (e, ei, bb->preds)
4109 if (e == fallthru)
4110 continue;
4112 flush_pending_stmts (e);
4117 /* Return true if basic block BB does nothing except pass control
4118 flow to another block, and we can safely insert a label at
4119 the start of the successor block.
4121 As a precondition, we require that BB be not equal to
4122 ENTRY_BLOCK_PTR. */
4124 static bool
4125 tree_forwarder_block_p (basic_block bb, bool phi_wanted)
4127 block_stmt_iterator bsi;
4129 /* BB must have a single outgoing edge. */
4130 if (!single_succ_p (bb)
4131 /* If PHI_WANTED is false, BB must not have any PHI nodes.
4132 Otherwise, BB must have PHI nodes. */
4133 || (phi_nodes (bb) != NULL_TREE) != phi_wanted
4134 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
4135 || single_succ (bb) == EXIT_BLOCK_PTR
4136 /* Nor should this be an infinite loop. */
4137 || single_succ (bb) == bb
4138 /* BB may not have an abnormal outgoing edge. */
4139 || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
4140 return false;
4142 #if ENABLE_CHECKING
4143 gcc_assert (bb != ENTRY_BLOCK_PTR);
4144 #endif
4146 /* Now walk through the statements backward. We can ignore labels,
4147 anything else means this is not a forwarder block. */
4148 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4150 tree stmt = bsi_stmt (bsi);
4152 switch (TREE_CODE (stmt))
4154 case LABEL_EXPR:
4155 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
4156 return false;
4157 break;
4159 default:
4160 return false;
4164 if (find_edge (ENTRY_BLOCK_PTR, bb))
4165 return false;
4167 if (current_loops)
4169 basic_block dest;
4170 /* Protect loop latches, headers and preheaders. */
4171 if (bb->loop_father->header == bb)
4172 return false;
4173 dest = EDGE_SUCC (bb, 0)->dest;
4175 if (dest->loop_father->header == dest)
4176 return false;
4179 return true;
4182 /* Return true if BB has at least one abnormal incoming edge. */
4184 static inline bool
4185 has_abnormal_incoming_edge_p (basic_block bb)
4187 edge e;
4188 edge_iterator ei;
4190 FOR_EACH_EDGE (e, ei, bb->preds)
4191 if (e->flags & EDGE_ABNORMAL)
4192 return true;
4194 return false;
4197 /* Removes forwarder block BB. Returns false if this failed. If a new
4198 forwarder block is created due to redirection of edges, it is
4199 stored to worklist. */
4201 static bool
4202 remove_forwarder_block (basic_block bb, basic_block **worklist)
4204 edge succ = single_succ_edge (bb), e, s;
4205 basic_block dest = succ->dest;
4206 tree label;
4207 tree phi;
4208 edge_iterator ei;
4209 block_stmt_iterator bsi, bsi_to;
4210 bool seen_abnormal_edge = false;
4212 /* We check for infinite loops already in tree_forwarder_block_p.
4213 However, an infinite loop may still be created afterwards by the
4214 removal of other forwarders. */
4215 if (dest == bb)
4216 return false;
4218 /* If the destination block consists of a nonlocal label, do not merge
4219 it. */
4220 label = first_stmt (dest);
4221 if (label
4222 && TREE_CODE (label) == LABEL_EXPR
4223 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
4224 return false;
4226 /* If there is an abnormal edge to basic block BB, but not into
4227 DEST, problems might occur when removing the PHI nodes while going
4228 out of SSA, due to overlapping live ranges of registers.
4230 If there is an abnormal edge in DEST, the problems would occur
4231 anyway since cleanup_dead_labels would then merge the labels for
4232 two different eh regions, and rest of exception handling code
4233 does not like it.
4235 So if there is an abnormal edge to BB, proceed only if there is
4236 no abnormal edge to DEST and there are no phi nodes in DEST. */
4237 if (has_abnormal_incoming_edge_p (bb))
4239 seen_abnormal_edge = true;
4241 if (has_abnormal_incoming_edge_p (dest)
4242 || phi_nodes (dest) != NULL_TREE)
4243 return false;
4246 /* If there are phi nodes in DEST, and some of the blocks that are
4247 predecessors of BB are also predecessors of DEST, check that the
4248 phi node arguments match. */
4249 if (phi_nodes (dest))
4251 FOR_EACH_EDGE (e, ei, bb->preds)
4253 s = find_edge (e->src, dest);
4254 if (!s)
4255 continue;
4257 if (!phi_alternatives_equal (dest, succ, s))
4258 return false;
4262 /* Redirect the edges. */
4263 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4265 if (e->flags & EDGE_ABNORMAL)
4267 /* If there is an abnormal edge, redirect it anyway, and
4268 move the labels to the new block to make it legal. */
4269 s = redirect_edge_succ_nodup (e, dest);
4271 else
4272 s = redirect_edge_and_branch (e, dest);
4274 if (s == e)
4276 /* Create arguments for the phi nodes, since the edge was not
4277 here before. */
4278 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4279 add_phi_arg (phi, PHI_ARG_DEF (phi, succ->dest_idx), s);
4281 else
4283 /* The source basic block might become a forwarder. We know
4284 that it was not a forwarder before, since it used to have
4285 at least two outgoing edges, so we may just add it to the
4286 worklist. */
4287 if (tree_forwarder_block_p (s->src, false))
4288 *(*worklist)++ = s->src;
4292 if (seen_abnormal_edge)
4294 /* Move the labels to the new block, so that the redirection of
4295 the abnormal edges works. */
4297 bsi_to = bsi_start (dest);
4298 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
4300 label = bsi_stmt (bsi);
4301 gcc_assert (TREE_CODE (label) == LABEL_EXPR);
4302 bsi_remove (&bsi);
4303 bsi_insert_before (&bsi_to, label, BSI_CONTINUE_LINKING);
4307 /* Update the dominators. */
4308 if (dom_info_available_p (CDI_DOMINATORS))
4310 basic_block dom, dombb, domdest;
4312 dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
4313 domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
4314 if (domdest == bb)
4316 /* Shortcut to avoid calling (relatively expensive)
4317 nearest_common_dominator unless necessary. */
4318 dom = dombb;
4320 else
4321 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
4323 set_immediate_dominator (CDI_DOMINATORS, dest, dom);
4326 /* And kill the forwarder block. */
4327 delete_basic_block (bb);
4329 return true;
4332 /* Removes forwarder blocks. */
4334 static bool
4335 cleanup_forwarder_blocks (void)
4337 basic_block bb;
4338 bool changed = false;
4339 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
4340 basic_block *current = worklist;
4342 FOR_EACH_BB (bb)
4344 if (tree_forwarder_block_p (bb, false))
4345 *current++ = bb;
4348 while (current != worklist)
4350 bb = *--current;
4351 changed |= remove_forwarder_block (bb, &current);
4354 free (worklist);
4355 return changed;
4358 /* Merge the PHI nodes at BB into those at BB's sole successor. */
4360 static void
4361 remove_forwarder_block_with_phi (basic_block bb)
4363 edge succ = single_succ_edge (bb);
4364 basic_block dest = succ->dest;
4365 tree label;
4366 basic_block dombb, domdest, dom;
4368 /* We check for infinite loops already in tree_forwarder_block_p.
4369 However, an infinite loop may still be created afterwards by the
4370 removal of other forwarders. */
4371 if (dest == bb)
4372 return;
4374 /* If the destination block consists of a nonlocal label, do not
4375 merge it. */
4376 label = first_stmt (dest);
4377 if (label
4378 && TREE_CODE (label) == LABEL_EXPR
4379 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
4380 return;
4382 /* Redirect each incoming edge to BB to DEST. */
4383 while (EDGE_COUNT (bb->preds) > 0)
4385 edge e = EDGE_PRED (bb, 0), s;
4386 tree phi;
4388 s = find_edge (e->src, dest);
4389 if (s)
4391 /* We already have an edge S from E->src to DEST. If S and
4392 E->dest's sole successor edge have the same PHI arguments
4393 at DEST, redirect S to DEST. */
4394 if (phi_alternatives_equal (dest, s, succ))
4396 e = redirect_edge_and_branch (e, dest);
4397 PENDING_STMT (e) = NULL_TREE;
4398 continue;
4401 /* PHI arguments are different. Create a forwarder block by
4402 splitting E so that we can merge PHI arguments on E to
4403 DEST. */
4404 e = single_succ_edge (split_edge (e));
4407 s = redirect_edge_and_branch (e, dest);
4409 /* redirect_edge_and_branch must not create a new edge. */
4410 gcc_assert (s == e);
4412 /* Add to the PHI nodes at DEST each PHI argument removed at the
4413 destination of E. */
4414 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4416 tree def = PHI_ARG_DEF (phi, succ->dest_idx);
4418 if (TREE_CODE (def) == SSA_NAME)
4420 tree var;
4422 /* If DEF is one of the results of PHI nodes removed during
4423 redirection, replace it with the PHI argument that used
4424 to be on E. */
4425 for (var = PENDING_STMT (e); var; var = TREE_CHAIN (var))
4427 tree old_arg = TREE_PURPOSE (var);
4428 tree new_arg = TREE_VALUE (var);
4430 if (def == old_arg)
4432 def = new_arg;
4433 break;
4438 add_phi_arg (phi, def, s);
4441 PENDING_STMT (e) = NULL;
4444 /* Update the dominators. */
4445 dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
4446 domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
4447 if (domdest == bb)
4449 /* Shortcut to avoid calling (relatively expensive)
4450 nearest_common_dominator unless necessary. */
4451 dom = dombb;
4453 else
4454 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
4456 set_immediate_dominator (CDI_DOMINATORS, dest, dom);
4458 /* Remove BB since all of BB's incoming edges have been redirected
4459 to DEST. */
4460 delete_basic_block (bb);
4463 /* This pass merges PHI nodes if one feeds into another. For example,
4464 suppose we have the following:
4466 goto <bb 9> (<L9>);
4468 <L8>:;
4469 tem_17 = foo ();
4471 # tem_6 = PHI <tem_17(8), tem_23(7)>;
4472 <L9>:;
4474 # tem_3 = PHI <tem_6(9), tem_2(5)>;
4475 <L10>:;
4477 Then we merge the first PHI node into the second one like so:
4479 goto <bb 9> (<L10>);
4481 <L8>:;
4482 tem_17 = foo ();
4484 # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
4485 <L10>:;
4488 static void
4489 merge_phi_nodes (void)
4491 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
4492 basic_block *current = worklist;
4493 basic_block bb;
4495 calculate_dominance_info (CDI_DOMINATORS);
4497 /* Find all PHI nodes that we may be able to merge. */
4498 FOR_EACH_BB (bb)
4500 basic_block dest;
4502 /* Look for a forwarder block with PHI nodes. */
4503 if (!tree_forwarder_block_p (bb, true))
4504 continue;
4506 dest = single_succ (bb);
4508 /* We have to feed into another basic block with PHI
4509 nodes. */
4510 if (!phi_nodes (dest)
4511 /* We don't want to deal with a basic block with
4512 abnormal edges. */
4513 || has_abnormal_incoming_edge_p (bb))
4514 continue;
4516 if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
4518 /* If BB does not dominate DEST, then the PHI nodes at
4519 DEST must be the only users of the results of the PHI
4520 nodes at BB. */
4521 *current++ = bb;
4525 /* Now let's drain WORKLIST. */
4526 while (current != worklist)
4528 bb = *--current;
4529 remove_forwarder_block_with_phi (bb);
4532 free (worklist);
4535 static bool
4536 gate_merge_phi (void)
4538 return 1;
4541 struct tree_opt_pass pass_merge_phi = {
4542 "mergephi", /* name */
4543 gate_merge_phi, /* gate */
4544 merge_phi_nodes, /* execute */
4545 NULL, /* sub */
4546 NULL, /* next */
4547 0, /* static_pass_number */
4548 TV_TREE_MERGE_PHI, /* tv_id */
4549 PROP_cfg | PROP_ssa, /* properties_required */
4550 0, /* properties_provided */
4551 0, /* properties_destroyed */
4552 0, /* todo_flags_start */
4553 TODO_dump_func | TODO_ggc_collect /* todo_flags_finish */
4554 | TODO_verify_ssa,
4555 0 /* letter */
4558 /* Return a non-special label in the head of basic block BLOCK.
4559 Create one if it doesn't exist. */
4561 tree
4562 tree_block_label (basic_block bb)
4564 block_stmt_iterator i, s = bsi_start (bb);
4565 bool first = true;
4566 tree label, stmt;
4568 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4570 stmt = bsi_stmt (i);
4571 if (TREE_CODE (stmt) != LABEL_EXPR)
4572 break;
4573 label = LABEL_EXPR_LABEL (stmt);
4574 if (!DECL_NONLOCAL (label))
4576 if (!first)
4577 bsi_move_before (&i, &s);
4578 return label;
4582 label = create_artificial_label ();
4583 stmt = build1 (LABEL_EXPR, void_type_node, label);
4584 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4585 return label;
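/* Note: when an existing usable label is found but is not the first
statement of BB, the bsi_move_before call above moves it to the start of
the block, so callers can rely on the returned label being at the head
of BB. */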
4589 /* Attempt to perform edge redirection by replacing a possibly complex
4590 jump instruction by a goto or by removing the jump completely.
4591 This can apply only if all edges now point to the same block. The
4592 parameters and return values are equivalent to
4593 redirect_edge_and_branch. */
4595 static edge
4596 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4598 basic_block src = e->src;
4599 block_stmt_iterator b;
4600 tree stmt;
4602 /* We can replace or remove a complex jump only when we have exactly
4603 two edges. */
4604 if (EDGE_COUNT (src->succs) != 2
4605 /* Verify that all targets will be TARGET. Specifically, the
4606 edge that is not E must also go to TARGET. */
4607 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4608 return NULL;
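/* In the guard above, the index expression EDGE_SUCC (src, 0) == e
evaluates to 1 when successor edge 0 is E and to 0 otherwise, so
EDGE_SUCC (src, EDGE_SUCC (src, 0) == e) is simply the successor edge of
SRC other than E. */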
4610 b = bsi_last (src);
4611 if (bsi_end_p (b))
4612 return NULL;
4613 stmt = bsi_stmt (b);
4615 if (TREE_CODE (stmt) == COND_EXPR
4616 || TREE_CODE (stmt) == SWITCH_EXPR)
4618 bsi_remove (&b);
4619 e = ssa_redirect_edge (e, target);
4620 e->flags = EDGE_FALLTHRU;
4621 return e;
4624 return NULL;
4628 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4629 edge representing the redirected branch. */
4631 static edge
4632 tree_redirect_edge_and_branch (edge e, basic_block dest)
4634 basic_block bb = e->src;
4635 block_stmt_iterator bsi;
4636 edge ret;
4637 tree label, stmt;
4639 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4640 return NULL;
4642 if (e->src != ENTRY_BLOCK_PTR
4643 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4644 return ret;
4646 if (e->dest == dest)
4647 return NULL;
4649 label = tree_block_label (dest);
4651 bsi = bsi_last (bb);
4652 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4654 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4656 case COND_EXPR:
4657 stmt = (e->flags & EDGE_TRUE_VALUE
4658 ? COND_EXPR_THEN (stmt)
4659 : COND_EXPR_ELSE (stmt));
4660 GOTO_DESTINATION (stmt) = label;
4661 break;
4663 case GOTO_EXPR:
4664 /* No non-abnormal edges should lead from a non-simple goto, and
4665 simple ones should be represented implicitly. */
4666 gcc_unreachable ();
4668 case SWITCH_EXPR:
4670 tree cases = get_cases_for_edge (e, stmt);
4672 /* If we have a list of cases associated with E, then use it
4673 as it's a lot faster than walking the entire case vector. */
4674 if (cases)
4676 edge e2 = find_edge (e->src, dest);
4677 tree last, first;
4679 first = cases;
4680 while (cases)
4682 last = cases;
4683 CASE_LABEL (cases) = label;
4684 cases = TREE_CHAIN (cases);
4687 /* If there was already an edge in the CFG, then we need
4688 to move all the cases associated with E to E2. */
4689 if (e2)
4691 tree cases2 = get_cases_for_edge (e2, stmt);
4693 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4694 TREE_CHAIN (cases2) = first;
4697 else
4699 tree vec = SWITCH_LABELS (stmt);
4700 size_t i, n = TREE_VEC_LENGTH (vec);
4702 for (i = 0; i < n; i++)
4704 tree elt = TREE_VEC_ELT (vec, i);
4706 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4707 CASE_LABEL (elt) = label;
4711 break;
4714 case RETURN_EXPR:
4715 bsi_remove (&bsi);
4716 e->flags |= EDGE_FALLTHRU;
4717 break;
4719 default:
4720 /* Otherwise it must be a fallthru edge, and we don't need to
4721 do anything besides redirecting it. */
4722 gcc_assert (e->flags & EDGE_FALLTHRU);
4723 break;
4726 /* Update/insert PHI nodes as necessary. */
4728 /* Now update the edges in the CFG. */
4729 e = ssa_redirect_edge (e, dest);
4731 return e;
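/* Note: ssa_redirect_edge removes the PHI arguments associated with E at
its old destination and queues them in PENDING_STMT (e); callers either
flush that list into the PHI nodes of the new destination or discard it
(see remove_forwarder_block_with_phi above and flush_pending_stmts in
tree_duplicate_sese_region below). */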
4735 /* Simple wrapper, as we can always redirect fallthru edges. */
4737 static basic_block
4738 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4740 e = tree_redirect_edge_and_branch (e, dest);
4741 gcc_assert (e);
4743 return NULL;
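/* Returning NULL tells the generic CFG code that no new basic block had to
be created to force the redirection; at the tree level
tree_redirect_edge_and_branch can always perform it in place. */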
4747 /* Splits basic block BB after statement STMT (but at least after the
4748 labels). If STMT is NULL, BB is split just after the labels. */
4750 static basic_block
4751 tree_split_block (basic_block bb, void *stmt)
4753 block_stmt_iterator bsi, bsi_tgt;
4754 tree act;
4755 basic_block new_bb;
4756 edge e;
4757 edge_iterator ei;
4759 new_bb = create_empty_bb (bb);
4761 /* Redirect the outgoing edges. */
4762 new_bb->succs = bb->succs;
4763 bb->succs = NULL;
4764 FOR_EACH_EDGE (e, ei, new_bb->succs)
4765 e->src = new_bb;
4767 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4768 stmt = NULL;
4770 /* Move everything from BSI to the new basic block. */
4771 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4773 act = bsi_stmt (bsi);
4774 if (TREE_CODE (act) == LABEL_EXPR)
4775 continue;
4777 if (!stmt)
4778 break;
4780 if (stmt == act)
4782 bsi_next (&bsi);
4783 break;
4787 bsi_tgt = bsi_start (new_bb);
4788 while (!bsi_end_p (bsi))
4790 act = bsi_stmt (bsi);
4791 bsi_remove (&bsi);
4792 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4795 return new_bb;
4799 /* Moves basic block BB after block AFTER. */
4801 static bool
4802 tree_move_block_after (basic_block bb, basic_block after)
4804 if (bb->prev_bb == after)
4805 return true;
4807 unlink_block (bb);
4808 link_block (bb, after);
4810 return true;
4814 /* Return true if basic_block can be duplicated. */
4816 static bool
4817 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4819 return true;
4822 /* Create a duplicate of the basic block BB. NOTE: This does not
4823 preserve SSA form. */
4825 static basic_block
4826 tree_duplicate_bb (basic_block bb)
4828 basic_block new_bb;
4829 block_stmt_iterator bsi, bsi_tgt;
4830 tree phi, val;
4831 ssa_op_iter op_iter;
4833 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4835 /* First copy the phi nodes. We do not copy phi node arguments here,
4836 since the edges are not ready yet. Keep the chain of phi nodes in
4837 the same order, so that we can add them later. */
4838 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4840 mark_for_rewrite (PHI_RESULT (phi));
4841 create_phi_node (PHI_RESULT (phi), new_bb);
4843 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4845 bsi_tgt = bsi_start (new_bb);
4846 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4848 tree stmt = bsi_stmt (bsi);
4849 tree copy;
4851 if (TREE_CODE (stmt) == LABEL_EXPR)
4852 continue;
4854 /* Record the definitions. */
4855 get_stmt_operands (stmt);
4857 FOR_EACH_SSA_TREE_OPERAND (val, stmt, op_iter, SSA_OP_ALL_DEFS)
4858 mark_for_rewrite (val);
4860 copy = unshare_expr (stmt);
4862 /* Copy also the virtual operands. */
4863 get_stmt_ann (copy);
4864 copy_virtual_operands (copy, stmt);
4866 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4869 return new_bb;
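/* Note: tree_duplicate_bb leaves PHI arguments and SSA names untouched;
callers such as tree_duplicate_sese_region below, which duplicates blocks
via copy_bbs, afterwards use add_phi_args_after_copy and
rewrite_to_new_ssa_names to restore a consistent SSA form. */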
4872 /* Basic block BB_COPY was created by code duplication. Add phi node
4873 arguments for edges going out of BB_COPY. The blocks that were
4874 duplicated have rbi->duplicated set to one. */
4876 void
4877 add_phi_args_after_copy_bb (basic_block bb_copy)
4879 basic_block bb, dest;
4880 edge e, e_copy;
4881 edge_iterator ei;
4882 tree phi, phi_copy, phi_next, def;
4884 bb = bb_copy->rbi->original;
4886 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4888 if (!phi_nodes (e_copy->dest))
4889 continue;
4891 if (e_copy->dest->rbi->duplicated)
4892 dest = e_copy->dest->rbi->original;
4893 else
4894 dest = e_copy->dest;
4896 e = find_edge (bb, dest);
4897 if (!e)
4899 /* During loop unrolling the target of the latch edge is copied.
4900 In this case we are not looking for the edge to dest, but for the
4901 edge to the duplicated block whose original was dest. */
4902 FOR_EACH_EDGE (e, ei, bb->succs)
4903 if (e->dest->rbi->duplicated
4904 && e->dest->rbi->original == dest)
4905 break;
4907 gcc_assert (e != NULL);
4910 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4911 phi;
4912 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4914 phi_next = PHI_CHAIN (phi);
4916 gcc_assert (PHI_RESULT (phi) == PHI_RESULT (phi_copy));
4917 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4918 add_phi_arg (phi_copy, def, e_copy);
4923 /* Blocks in REGION_COPY array of length N_REGION were created by
4924 duplication of basic blocks. Add phi node arguments for edges
4925 going from these blocks. */
4927 void
4928 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4930 unsigned i;
4932 for (i = 0; i < n_region; i++)
4933 region_copy[i]->rbi->duplicated = 1;
4935 for (i = 0; i < n_region; i++)
4936 add_phi_args_after_copy_bb (region_copy[i]);
4938 for (i = 0; i < n_region; i++)
4939 region_copy[i]->rbi->duplicated = 0;
4942 /* Maps the old ssa name FROM_NAME to TO_NAME. */
4944 struct ssa_name_map_entry
4946 tree from_name;
4947 tree to_name;
4950 /* Hash function for ssa_name_map_entry. */
4952 static hashval_t
4953 ssa_name_map_entry_hash (const void *entry)
4955 const struct ssa_name_map_entry *en = entry;
4956 return SSA_NAME_VERSION (en->from_name);
4959 /* Equality function for ssa_name_map_entry. */
4961 static int
4962 ssa_name_map_entry_eq (const void *in_table, const void *ssa_name)
4964 const struct ssa_name_map_entry *en = in_table;
4966 return en->from_name == ssa_name;
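/* Note that this equality callback compares a table entry against a bare
SSA_NAME used as the lookup key, not against another entry; the lookups in
allocate_ssa_names and rewrite_to_new_ssa_names_def/use below pass the
SSA_NAME itself, with its version number as the hash. */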
4969 /* Allocate duplicates of the ssa names recorded in the bitmap DEFINITIONS
4970 and store the mapping in MAP. */
4972 void
4973 allocate_ssa_names (bitmap definitions, htab_t *map)
4975 tree name;
4976 struct ssa_name_map_entry *entry;
4977 PTR *slot;
4978 unsigned ver;
4979 bitmap_iterator bi;
4981 if (!*map)
4982 *map = htab_create (10, ssa_name_map_entry_hash,
4983 ssa_name_map_entry_eq, free);
4984 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
4986 name = ssa_name (ver);
4987 slot = htab_find_slot_with_hash (*map, name, SSA_NAME_VERSION (name),
4988 INSERT);
4989 if (*slot)
4990 entry = *slot;
4991 else
4993 entry = xmalloc (sizeof (struct ssa_name_map_entry));
4994 entry->from_name = name;
4995 *slot = entry;
4997 entry->to_name = duplicate_ssa_name (name, SSA_NAME_DEF_STMT (name));
5001 /* Rewrite the definition DEF in statement STMT to new ssa name as specified
5002 by the mapping MAP. */
5004 static void
5005 rewrite_to_new_ssa_names_def (def_operand_p def, tree stmt, htab_t map)
5007 tree name = DEF_FROM_PTR (def);
5008 struct ssa_name_map_entry *entry;
5010 gcc_assert (TREE_CODE (name) == SSA_NAME);
5012 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
5013 if (!entry)
5014 return;
5016 SET_DEF (def, entry->to_name);
5017 SSA_NAME_DEF_STMT (entry->to_name) = stmt;
5020 /* Rewrite the USE to new ssa name as specified by the mapping MAP. */
5022 static void
5023 rewrite_to_new_ssa_names_use (use_operand_p use, htab_t map)
5025 tree name = USE_FROM_PTR (use);
5026 struct ssa_name_map_entry *entry;
5028 if (TREE_CODE (name) != SSA_NAME)
5029 return;
5031 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
5032 if (!entry)
5033 return;
5035 SET_USE (use, entry->to_name);
5038 /* Rewrite the ssa names in basic block BB to new ones as specified by the
5039 mapping MAP. */
5041 void
5042 rewrite_to_new_ssa_names_bb (basic_block bb, htab_t map)
5044 unsigned i;
5045 edge e;
5046 edge_iterator ei;
5047 tree phi, stmt;
5048 block_stmt_iterator bsi;
5049 use_optype uses;
5050 vuse_optype vuses;
5051 def_optype defs;
5052 v_may_def_optype v_may_defs;
5053 v_must_def_optype v_must_defs;
5054 stmt_ann_t ann;
5056 FOR_EACH_EDGE (e, ei, bb->preds)
5057 if (e->flags & EDGE_ABNORMAL)
5058 break;
5060 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5062 rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi), phi, map);
5063 if (e)
5064 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
5067 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5069 stmt = bsi_stmt (bsi);
5070 get_stmt_operands (stmt);
5071 ann = stmt_ann (stmt);
5073 uses = USE_OPS (ann);
5074 for (i = 0; i < NUM_USES (uses); i++)
5075 rewrite_to_new_ssa_names_use (USE_OP_PTR (uses, i), map);
5077 defs = DEF_OPS (ann);
5078 for (i = 0; i < NUM_DEFS (defs); i++)
5079 rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs, i), stmt, map);
5081 vuses = VUSE_OPS (ann);
5082 for (i = 0; i < NUM_VUSES (vuses); i++)
5083 rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses, i), map);
5085 v_may_defs = V_MAY_DEF_OPS (ann);
5086 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
5088 rewrite_to_new_ssa_names_use
5089 (V_MAY_DEF_OP_PTR (v_may_defs, i), map);
5090 rewrite_to_new_ssa_names_def
5091 (V_MAY_DEF_RESULT_PTR (v_may_defs, i), stmt, map);
5094 v_must_defs = V_MUST_DEF_OPS (ann);
5095 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
5097 rewrite_to_new_ssa_names_def
5098 (V_MUST_DEF_RESULT_PTR (v_must_defs, i), stmt, map);
5099 rewrite_to_new_ssa_names_use
5100 (V_MUST_DEF_KILL_PTR (v_must_defs, i), map);
5104 FOR_EACH_EDGE (e, ei, bb->succs)
5105 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
5107 rewrite_to_new_ssa_names_use
5108 (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), map);
5110 if (e->flags & EDGE_ABNORMAL)
5112 tree op = PHI_ARG_DEF_FROM_EDGE (phi, e);
5113 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op) = 1;
5118 /* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
5119 by the mapping MAP. */
5121 void
5122 rewrite_to_new_ssa_names (basic_block *region, unsigned n_region, htab_t map)
5124 unsigned r;
5126 for (r = 0; r < n_region; r++)
5127 rewrite_to_new_ssa_names_bb (region[r], map);
5130 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5131 important exit edge EXIT. By important we mean that no SSA name defined
5132 inside region is live over the other exit edges of the region. All entry
5133 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5134 to the duplicate of the region. SSA form, dominance and loop information
5135 is updated. The new basic blocks are stored to REGION_COPY in the same
5136 order as they had in REGION, provided that REGION_COPY is not NULL.
5137 The function returns false if it is unable to copy the region,
5138 true otherwise. */
5140 bool
5141 tree_duplicate_sese_region (edge entry, edge exit,
5142 basic_block *region, unsigned n_region,
5143 basic_block *region_copy)
5145 unsigned i, n_doms, ver;
5146 bool free_region_copy = false, copying_header = false;
5147 struct loop *loop = entry->dest->loop_father;
5148 edge exit_copy;
5149 bitmap definitions;
5150 tree phi;
5151 basic_block *doms;
5152 htab_t ssa_name_map = NULL;
5153 edge redirected;
5154 bitmap_iterator bi;
5156 if (!can_copy_bbs_p (region, n_region))
5157 return false;
5159 /* Some sanity checking. Note that we do not check for all possible
5160 misuses of the function. That is, if you ask to copy something weird,
5161 it will work, but the state of structures probably will not be
5162 correct. */
5164 for (i = 0; i < n_region; i++)
5166 /* We do not handle subloops, i.e. all the blocks must belong to the
5167 same loop. */
5168 if (region[i]->loop_father != loop)
5169 return false;
5171 if (region[i] != entry->dest
5172 && region[i] == loop->header)
5173 return false;
5176 loop->copy = loop;
5178 /* In case the function is used for loop header copying (which is the primary
5179 use), ensure that EXIT and its copy will be new latch and entry edges. */
5180 if (loop->header == entry->dest)
5182 copying_header = true;
5183 loop->copy = loop->outer;
5185 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5186 return false;
5188 for (i = 0; i < n_region; i++)
5189 if (region[i] != exit->src
5190 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5191 return false;
5194 if (!region_copy)
5196 region_copy = xmalloc (sizeof (basic_block) * n_region);
5197 free_region_copy = true;
5200 gcc_assert (!any_marked_for_rewrite_p ());
5202 /* Record blocks outside the region that are dominated by something
5203 inside. */
5204 doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
5205 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
5207 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
5208 definitions = marked_ssa_names ();
5210 if (copying_header)
5212 loop->header = exit->dest;
5213 loop->latch = exit->src;
5216 /* Redirect the entry and add the phi node arguments. */
5217 redirected = redirect_edge_and_branch (entry, entry->dest->rbi->copy);
5218 gcc_assert (redirected != NULL);
5219 flush_pending_stmts (entry);
5221 /* Concerning updating of dominators: We must recount dominators
5222 for entry block and its copy. Anything that is outside of the region, but
5223 was dominated by something inside needs recounting as well. */
5224 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5225 doms[n_doms++] = entry->dest->rbi->original;
5226 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
5227 free (doms);
5229 /* Add the other phi node arguments. */
5230 add_phi_args_after_copy (region_copy, n_region);
5232 /* Add phi nodes for definitions at exit. TODO -- once we have immediate
5233 uses, it should be possible to emit phi nodes just for definitions that
5234 are used outside region. */
5235 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
5237 tree name = ssa_name (ver);
5239 phi = create_phi_node (name, exit->dest);
5240 add_phi_arg (phi, name, exit);
5241 add_phi_arg (phi, name, exit_copy);
5243 SSA_NAME_DEF_STMT (name) = phi;
5246 /* And create new definitions inside region and its copy. TODO -- once we
5247 have immediate uses, it might be better to leave definitions in region
5248 unchanged, create new ssa names for phi nodes on exit, and rewrite
5249 the uses, to avoid changing the copied region. */
5250 allocate_ssa_names (definitions, &ssa_name_map);
5251 rewrite_to_new_ssa_names (region, n_region, ssa_name_map);
5252 allocate_ssa_names (definitions, &ssa_name_map);
5253 rewrite_to_new_ssa_names (region_copy, n_region, ssa_name_map);
5254 htab_delete (ssa_name_map);
5256 if (free_region_copy)
5257 free (region_copy);
5259 unmark_all_for_rewrite ();
5260 BITMAP_FREE (definitions);
5262 return true;
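/* Illustrative sketch (not part of this file): a caller such as a loop
header copying pass might invoke the region duplicator roughly as follows.
The variables entry, exit, bbs, n_bbs and copied_bbs are hypothetical.

entry = loop_preheader_edge (loop);
if (!tree_duplicate_sese_region (entry, exit, bbs, n_bbs, copied_bbs))
continue;
*/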
5265 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
5267 void
5268 dump_function_to_file (tree fn, FILE *file, int flags)
5270 tree arg, vars, var;
5271 bool ignore_topmost_bind = false, any_var = false;
5272 basic_block bb;
5273 tree chain;
5275 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5277 arg = DECL_ARGUMENTS (fn);
5278 while (arg)
5280 print_generic_expr (file, arg, dump_flags);
5281 if (TREE_CHAIN (arg))
5282 fprintf (file, ", ");
5283 arg = TREE_CHAIN (arg);
5285 fprintf (file, ")\n");
5287 if (flags & TDF_RAW)
5289 dump_node (fn, TDF_SLIM | flags, file);
5290 return;
5293 /* When GIMPLE is lowered, the variables are no longer available in
5294 BIND_EXPRs, so display them separately. */
5295 if (cfun && cfun->unexpanded_var_list)
5297 ignore_topmost_bind = true;
5299 fprintf (file, "{\n");
5300 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5302 var = TREE_VALUE (vars);
5304 print_generic_decl (file, var, flags);
5305 fprintf (file, "\n");
5307 any_var = true;
5311 if (basic_block_info)
5313 /* Make a CFG based dump. */
5314 check_bb_profile (ENTRY_BLOCK_PTR, file);
5315 if (!ignore_topmost_bind)
5316 fprintf (file, "{\n");
5318 if (any_var && n_basic_blocks)
5319 fprintf (file, "\n");
5321 FOR_EACH_BB (bb)
5322 dump_generic_bb (file, bb, 2, flags);
5324 fprintf (file, "}\n");
5325 check_bb_profile (EXIT_BLOCK_PTR, file);
5327 else
5329 int indent;
5331 /* Make a tree based dump. */
5332 chain = DECL_SAVED_TREE (fn);
5334 if (TREE_CODE (chain) == BIND_EXPR)
5336 if (ignore_topmost_bind)
5338 chain = BIND_EXPR_BODY (chain);
5339 indent = 2;
5341 else
5342 indent = 0;
5344 else
5346 if (!ignore_topmost_bind)
5347 fprintf (file, "{\n");
5348 indent = 2;
5351 if (any_var)
5352 fprintf (file, "\n");
5354 print_generic_stmt_indented (file, chain, flags, indent);
5355 if (ignore_topmost_bind)
5356 fprintf (file, "}\n");
5359 fprintf (file, "\n\n");
5363 /* Pretty print of the loops intermediate representation. */
5364 static void print_loop (FILE *, struct loop *, int);
5365 static void print_pred_bbs (FILE *, basic_block bb);
5366 static void print_succ_bbs (FILE *, basic_block bb);
5369 /* Print on FILE the indexes of the predecessors of basic block BB. */
5371 static void
5372 print_pred_bbs (FILE *file, basic_block bb)
5374 edge e;
5375 edge_iterator ei;
5377 FOR_EACH_EDGE (e, ei, bb->preds)
5378 fprintf (file, "bb_%d", e->src->index);
5382 /* Print on FILE the indexes of the successors of basic block BB. */
5384 static void
5385 print_succ_bbs (FILE *file, basic_block bb)
5387 edge e;
5388 edge_iterator ei;
5390 FOR_EACH_EDGE (e, ei, bb->succs)
5391 fprintf (file, "bb_%d", e->dest->index);
5395 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5397 static void
5398 print_loop (FILE *file, struct loop *loop, int indent)
5400 char *s_indent;
5401 basic_block bb;
5403 if (loop == NULL)
5404 return;
5406 s_indent = (char *) alloca ((size_t) indent + 1);
5407 memset ((void *) s_indent, ' ', (size_t) indent);
5408 s_indent[indent] = '\0';
5410 /* Print the loop's header. */
5411 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5413 /* Print the loop's body. */
5414 fprintf (file, "%s{\n", s_indent);
5415 FOR_EACH_BB (bb)
5416 if (bb->loop_father == loop)
5418 /* Print the basic_block's header. */
5419 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5420 print_pred_bbs (file, bb);
5421 fprintf (file, "}, succs = {");
5422 print_succ_bbs (file, bb);
5423 fprintf (file, "})\n");
5425 /* Print the basic_block's body. */
5426 fprintf (file, "%s {\n", s_indent);
5427 tree_dump_bb (bb, file, indent + 4);
5428 fprintf (file, "%s }\n", s_indent);
5431 print_loop (file, loop->inner, indent + 2);
5432 fprintf (file, "%s}\n", s_indent);
5433 print_loop (file, loop->next, indent);
5437 /* Pretty print on FILE the loop tree of the current function, starting
5438 from the loop containing basic block 0. */
5440 void
5441 print_loop_ir (FILE *file)
5443 basic_block bb;
5445 bb = BASIC_BLOCK (0);
5446 if (bb && bb->loop_father)
5447 print_loop (file, bb->loop_father, 0);
5451 /* Debug the loop structure at the tree level, printing to stderr. */
5453 void
5454 debug_loop_ir (void)
5456 print_loop_ir (stderr);
5460 /* Return true if BB ends with a call, possibly followed by some
5461 instructions that must stay with the call. Return false
5462 otherwise. */
5464 static bool
5465 tree_block_ends_with_call_p (basic_block bb)
5467 block_stmt_iterator bsi = bsi_last (bb);
5468 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5472 /* Return true if BB ends with a conditional branch. Return false
5473 otherwise. */
5475 static bool
5476 tree_block_ends_with_condjump_p (basic_block bb)
5478 tree stmt = tsi_stmt (bsi_last (bb).tsi);
5479 return (TREE_CODE (stmt) == COND_EXPR);
5483 /* Return true if we need to add fake edge to exit at statement T.
5484 Helper function for tree_flow_call_edges_add. */
5486 static bool
5487 need_fake_edge_p (tree t)
5489 tree call;
5491 /* NORETURN and LONGJMP calls already have an edge to exit.
5492 CONST and PURE calls do not need one.
5493 We don't currently check for CONST and PURE here, although
5494 it would be a good idea, because those attributes are
5495 figured out from the RTL in mark_constant_function, and
5496 the counter incrementation code from -fprofile-arcs
5497 leads to different results from -fbranch-probabilities. */
5498 call = get_call_expr_in (t);
5499 if (call
5500 && !(call_expr_flags (call) & ECF_NORETURN))
5501 return true;
5503 if (TREE_CODE (t) == ASM_EXPR
5504 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5505 return true;
5507 return false;
5511 /* Add fake edges to the function exit for any non-constant and
5512 non-noreturn calls, volatile inline assembly in the bitmap of blocks
5513 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
5514 the number of blocks that were split.
5516 The goal is to expose cases in which entering a basic block does
5517 not imply that all subsequent instructions must be executed. */
5519 static int
5520 tree_flow_call_edges_add (sbitmap blocks)
5522 int i;
5523 int blocks_split = 0;
5524 int last_bb = last_basic_block;
5525 bool check_last_block = false;
5527 if (n_basic_blocks == 0)
5528 return 0;
5530 if (! blocks)
5531 check_last_block = true;
5532 else
5533 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5535 /* In the last basic block, before epilogue generation, there will be
5536 a fallthru edge to EXIT. Special care is required if the last insn
5537 of the last basic block is a call because make_edge folds duplicate
5538 edges, which would result in the fallthru edge also being marked
5539 fake, which would result in the fallthru edge being removed by
5540 remove_fake_edges, which would result in an invalid CFG.
5542 Moreover, we can't elide the outgoing fake edge, since the block
5543 profiler needs to take this into account in order to solve the minimal
5544 spanning tree in the case that the call doesn't return.
5546 Handle this by adding a dummy instruction in a new last basic block. */
5547 if (check_last_block)
5549 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5550 block_stmt_iterator bsi = bsi_last (bb);
5551 tree t = NULL_TREE;
5552 if (!bsi_end_p (bsi))
5553 t = bsi_stmt (bsi);
5555 if (need_fake_edge_p (t))
5557 edge e;
5559 e = find_edge (bb, EXIT_BLOCK_PTR);
5560 if (e)
5562 bsi_insert_on_edge (e, build_empty_stmt ());
5563 bsi_commit_edge_inserts ();
5568 /* Now add fake edges to the function exit for any non-constant
5569 calls since there is no way that we can determine if they will
5570 return or not... */
5571 for (i = 0; i < last_bb; i++)
5573 basic_block bb = BASIC_BLOCK (i);
5574 block_stmt_iterator bsi;
5575 tree stmt, last_stmt;
5577 if (!bb)
5578 continue;
5580 if (blocks && !TEST_BIT (blocks, i))
5581 continue;
5583 bsi = bsi_last (bb);
5584 if (!bsi_end_p (bsi))
5586 last_stmt = bsi_stmt (bsi);
5589 stmt = bsi_stmt (bsi);
5590 if (need_fake_edge_p (stmt))
5592 edge e;
5593 /* The handling above of the final block before the
5594 epilogue should be enough to verify that there is
5595 no edge to the exit block in CFG already.
5596 Calling make_edge in such case would cause us to
5597 mark that edge as fake and remove it later. */
5598 #ifdef ENABLE_CHECKING
5599 if (stmt == last_stmt)
5601 e = find_edge (bb, EXIT_BLOCK_PTR);
5602 gcc_assert (e == NULL);
5604 #endif
5606 /* Note that the following may create a new basic block
5607 and renumber the existing basic blocks. */
5608 if (stmt != last_stmt)
5610 e = split_block (bb, stmt);
5611 if (e)
5612 blocks_split++;
5614 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5616 bsi_prev (&bsi);
5618 while (!bsi_end_p (bsi));
5622 if (blocks_split)
5623 verify_flow_info ();
5625 return blocks_split;
5628 bool
5629 tree_purge_dead_eh_edges (basic_block bb)
5631 bool changed = false;
5632 edge e;
5633 edge_iterator ei;
5634 tree stmt = last_stmt (bb);
5636 if (stmt && tree_can_throw_internal (stmt))
5637 return false;
5639 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5641 if (e->flags & EDGE_EH)
5643 remove_edge (e);
5644 changed = true;
5646 else
5647 ei_next (&ei);
5650 /* Removal of dead EH edges might change dominators of not
5651 just immediate successors. E.g. when bb1 is changed so that
5652 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5653 eh edges purged by this function in:
5657 1-->2
5658 / \ |
5659 v v |
5660 3-->4 |
5662 --->5
5665 idom(bb5) must be recomputed. For now just free the dominance
5666 info. */
5667 if (changed)
5668 free_dominance_info (CDI_DOMINATORS);
5670 return changed;
5673 bool
5674 tree_purge_all_dead_eh_edges (bitmap blocks)
5676 bool changed = false;
5677 unsigned i;
5678 bitmap_iterator bi;
5680 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5682 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5685 return changed;
5688 /* This function is called whenever a new edge is created or
5689 redirected. */
5691 static void
5692 tree_execute_on_growing_pred (edge e)
5694 basic_block bb = e->dest;
5696 if (phi_nodes (bb))
5697 reserve_phi_args_for_new_edge (bb);
5700 /* This function is called immediately before edge E is removed from
5701 the edge vector E->dest->preds. */
5703 static void
5704 tree_execute_on_shrinking_pred (edge e)
5706 if (phi_nodes (e->dest))
5707 remove_phi_args (e);
5710 /*---------------------------------------------------------------------------
5711 Helper functions for Loop versioning
5712 ---------------------------------------------------------------------------*/
5714 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
5715 of 'first'. Both of them are dominated by 'new_head' basic block. When
5716 'new_head' was created by splitting 'second's incoming edge, the edge
5717 from 'new_head' to 'second' received phi arguments by split_edge(). Later,
5718 an additional edge 'e' was created to connect 'new_head' and 'first'. Now
5719 this routine adds to this additional edge 'e' the phi args that the edge
5720 from 'new_head' to 'second' received as part of the edge splitting.
5723 static void
5724 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5725 basic_block new_head, edge e)
5727 tree phi1, phi2;
5729 /* Browse all 'second' basic block phi nodes and add phi args to
5730 edge 'e' for 'first' head. PHI args are always in correct order. */
5732 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5733 phi2 && phi1;
5734 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5736 edge e2 = find_edge (new_head, second);
5738 if (e2)
5740 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5741 add_phi_arg (phi1, def, e);
5746 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
5747 FIRST_HEAD is the destination of the THEN part and SECOND_HEAD is
5748 the destination of the ELSE part. */
5749 static void
5750 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5751 basic_block cond_bb, void *cond_e)
5753 block_stmt_iterator bsi;
5754 tree goto1 = NULL_TREE;
5755 tree goto2 = NULL_TREE;
5756 tree new_cond_expr = NULL_TREE;
5757 tree cond_expr = (tree) cond_e;
5758 edge e0;
5760 /* Build new conditional expr */
5761 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5762 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5763 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5765 /* Add new cond in cond_bb. */
5766 bsi = bsi_start (cond_bb);
5767 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5768 /* Adjust edges appropriately to connect new head with first head
5769 as well as second head. */
5770 e0 = single_succ_edge (cond_bb);
5771 e0->flags &= ~EDGE_FALLTHRU;
5772 e0->flags |= EDGE_FALSE_VALUE;
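/* Note that only the existing successor edge of COND_BB is retagged here,
as the false edge; an EDGE_TRUE_VALUE edge from COND_BB to FIRST_HEAD is
not created by this function and is presumably added by the caller of the
lv_add_condition_to_bb hook. */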
5775 struct cfg_hooks tree_cfg_hooks = {
5776 "tree",
5777 tree_verify_flow_info,
5778 tree_dump_bb, /* dump_bb */
5779 create_bb, /* create_basic_block */
5780 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5781 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5782 remove_bb, /* delete_basic_block */
5783 tree_split_block, /* split_block */
5784 tree_move_block_after, /* move_block_after */
5785 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5786 tree_merge_blocks, /* merge_blocks */
5787 tree_predict_edge, /* predict_edge */
5788 tree_predicted_by_p, /* predicted_by_p */
5789 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5790 tree_duplicate_bb, /* duplicate_block */
5791 tree_split_edge, /* split_edge */
5792 tree_make_forwarder_block, /* make_forwarder_block */
5793 NULL, /* tidy_fallthru_edge */
5794 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5795 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5796 tree_flow_call_edges_add, /* flow_call_edges_add */
5797 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5798 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5799 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5800 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5801 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5802 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5803 flush_pending_stmts /* flush_pending_stmts */
5807 /* Split all critical edges. */
5809 static void
5810 split_critical_edges (void)
5812 basic_block bb;
5813 edge e;
5814 edge_iterator ei;
5816 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5817 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5818 mappings around the calls to split_edge. */
5819 start_recording_case_labels ();
5820 FOR_ALL_BB (bb)
5822 FOR_EACH_EDGE (e, ei, bb->succs)
5823 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5825 split_edge (e);
5828 end_recording_case_labels ();
5831 struct tree_opt_pass pass_split_crit_edges =
5833 "crited", /* name */
5834 NULL, /* gate */
5835 split_critical_edges, /* execute */
5836 NULL, /* sub */
5837 NULL, /* next */
5838 0, /* static_pass_number */
5839 TV_TREE_SPLIT_EDGES, /* tv_id */
5840 PROP_cfg, /* properties_required */
5841 PROP_no_crit_edges, /* properties_provided */
5842 0, /* properties_destroyed */
5843 0, /* todo_flags_start */
5844 TODO_dump_func, /* todo_flags_finish */
5845 0 /* letter */
5849 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5850 a temporary, register it to be renamed if necessary,
5851 and finally return the temporary. Put the statements to compute
5852 EXP before the current statement in BSI. */
5854 tree
5855 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5857 tree t, new_stmt, orig_stmt;
5859 if (is_gimple_val (exp))
5860 return exp;
5862 t = make_rename_temp (type, NULL);
5863 new_stmt = build (MODIFY_EXPR, type, t, exp);
5865 orig_stmt = bsi_stmt (*bsi);
5866 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5867 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5869 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5871 return t;
5874 /* Build a ternary operation and gimplify it. Emit code before BSI.
5875 Return the gimple_val holding the result. */
5877 tree
5878 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5879 tree type, tree a, tree b, tree c)
5881 tree ret;
5883 ret = fold (build3 (code, type, a, b, c));
5884 STRIP_NOPS (ret);
5886 return gimplify_val (bsi, type, ret);
5889 /* Build a binary operation and gimplify it. Emit code before BSI.
5890 Return the gimple_val holding the result. */
5892 tree
5893 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5894 tree type, tree a, tree b)
5896 tree ret;
5898 ret = fold (build2 (code, type, a, b));
5899 STRIP_NOPS (ret);
5901 return gimplify_val (bsi, type, ret);
5904 /* Build a unary operation and gimplify it. Emit code before BSI.
5905 Return the gimple_val holding the result. */
5907 tree
5908 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5909 tree a)
5911 tree ret;
5913 ret = fold (build1 (code, type, a));
5914 STRIP_NOPS (ret);
5916 return gimplify_val (bsi, type, ret);
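/* Illustrative sketch (not part of this file): a lowering pass can chain
these helpers to emit gimplified arithmetic before BSI, e.g. to compute
(a + b) * c with hypothetical operands a, b, c of type TYPE:

t1 = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);
t2 = gimplify_build2 (bsi, MULT_EXPR, type, t1, c);
*/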
5921 /* Emit return warnings. */
5923 static void
5924 execute_warn_function_return (void)
5926 #ifdef USE_MAPPED_LOCATION
5927 source_location location;
5928 #else
5929 location_t *locus;
5930 #endif
5931 tree last;
5932 edge e;
5933 edge_iterator ei;
5935 if (warn_missing_noreturn
5936 && !TREE_THIS_VOLATILE (cfun->decl)
5937 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5938 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5939 warning ("%Jfunction might be possible candidate for "
5940 "attribute %<noreturn%>",
5941 cfun->decl);
5943 /* If we have a path to EXIT, then we do return. */
5944 if (TREE_THIS_VOLATILE (cfun->decl)
5945 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5947 #ifdef USE_MAPPED_LOCATION
5948 location = UNKNOWN_LOCATION;
5949 #else
5950 locus = NULL;
5951 #endif
5952 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5954 last = last_stmt (e->src);
5955 if (TREE_CODE (last) == RETURN_EXPR
5956 #ifdef USE_MAPPED_LOCATION
5957 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5958 #else
5959 && (locus = EXPR_LOCUS (last)) != NULL)
5960 #endif
5961 break;
5963 #ifdef USE_MAPPED_LOCATION
5964 if (location == UNKNOWN_LOCATION)
5965 location = cfun->function_end_locus;
5966 warning ("%H%<noreturn%> function does return", &location);
5967 #else
5968 if (!locus)
5969 locus = &cfun->function_end_locus;
5970 warning ("%H%<noreturn%> function does return", locus);
5971 #endif
5974 /* If we see "return;" in some basic block, then we do reach the end
5975 without returning a value. */
5976 else if (warn_return_type
5977 && !TREE_NO_WARNING (cfun->decl)
5978 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5979 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5981 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5983 tree last = last_stmt (e->src);
5984 if (TREE_CODE (last) == RETURN_EXPR
5985 && TREE_OPERAND (last, 0) == NULL)
5987 #ifdef USE_MAPPED_LOCATION
5988 location = EXPR_LOCATION (last);
5989 if (location == UNKNOWN_LOCATION)
5990 location = cfun->function_end_locus;
5991 warning ("%Hcontrol reaches end of non-void function", &location);
5992 #else
5993 locus = EXPR_LOCUS (last);
5994 if (!locus)
5995 locus = &cfun->function_end_locus;
5996 warning ("%Hcontrol reaches end of non-void function", locus);
5997 #endif
5998 TREE_NO_WARNING (cfun->decl) = 1;
5999 break;
6006 /* Given a basic block B which ends with a conditional and has
6007 precisely two successors, determine which of the edges is taken if
6008 the conditional is true and which is taken if the conditional is
6009 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
6011 void
6012 extract_true_false_edges_from_block (basic_block b,
6013 edge *true_edge,
6014 edge *false_edge)
6016 edge e = EDGE_SUCC (b, 0);
6018 if (e->flags & EDGE_TRUE_VALUE)
6020 *true_edge = e;
6021 *false_edge = EDGE_SUCC (b, 1);
6023 else
6025 *false_edge = e;
6026 *true_edge = EDGE_SUCC (b, 1);
6030 struct tree_opt_pass pass_warn_function_return =
6032 NULL, /* name */
6033 NULL, /* gate */
6034 execute_warn_function_return, /* execute */
6035 NULL, /* sub */
6036 NULL, /* next */
6037 0, /* static_pass_number */
6038 0, /* tv_id */
6039 PROP_cfg, /* properties_required */
6040 0, /* properties_provided */
6041 0, /* properties_destroyed */
6042 0, /* todo_flags_start */
6043 0, /* todo_flags_finish */
6044 0 /* letter */