1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "errors.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
52 /* Local declarations. */
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
57 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
58 which use a particular edge. The CASE_LABEL_EXPRs are chained together
59 via their TREE_CHAIN field, which we clear after we're done with the
60 hash table to prevent problems with duplication of SWITCH_EXPRs.
62 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
63 update the case vector in response to edge redirections.
65 Right now this table is set up and torn down at key points in the
66 compilation process. It would be nice if we could make the table
67 more persistent. The key is getting notification of changes to
68 the CFG (particularly edge removal, creation and redirection). */
70 struct edge_to_cases_elt
72 /* The edge itself. Necessary for hashing and equality tests. */
73 edge e;
75 /* The case labels associated with this edge. We link these up via
76 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
77 when we destroy the hash table. This prevents problems when copying
78 SWITCH_EXPRs. */
79 tree case_labels;
82 static htab_t edge_to_cases;
84 /* CFG statistics. */
85 struct cfg_stats_d
87 long num_merged_labels;
90 static struct cfg_stats_d cfg_stats;
92 /* Nonzero if we found a computed goto while building basic blocks. */
93 static bool found_computed_goto;
95 /* Basic blocks and flowgraphs. */
96 static basic_block create_bb (void *, void *, basic_block);
97 static void create_block_annotation (basic_block);
98 static void free_blocks_annotations (void);
99 static void clear_blocks_annotations (void);
100 static void make_blocks (tree);
101 static void factor_computed_gotos (void);
103 /* Edges. */
104 static void make_edges (void);
105 static void make_ctrl_stmt_edges (basic_block);
106 static void make_exit_edges (basic_block);
107 static void make_cond_expr_edges (basic_block);
108 static void make_switch_expr_edges (basic_block);
109 static void make_goto_expr_edges (basic_block);
110 static edge tree_redirect_edge_and_branch (edge, basic_block);
111 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
112 static void split_critical_edges (void);
113 static bool remove_fallthru_edge (VEC(edge) *);
115 /* Various helpers. */
116 static inline bool stmt_starts_bb_p (tree, tree);
117 static int tree_verify_flow_info (void);
118 static void tree_make_forwarder_block (edge);
119 static bool tree_forwarder_block_p (basic_block, bool);
120 static void tree_cfg2vcg (FILE *);
122 /* Flowgraph optimization and cleanup. */
123 static void tree_merge_blocks (basic_block, basic_block);
124 static bool tree_can_merge_blocks_p (basic_block, basic_block);
125 static void remove_bb (basic_block);
126 static bool cleanup_control_flow (void);
127 static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
128 static edge find_taken_edge_computed_goto (basic_block, tree);
129 static edge find_taken_edge_cond_expr (basic_block, tree);
130 static edge find_taken_edge_switch_expr (basic_block, tree);
131 static tree find_case_label_for_value (tree, tree);
132 static bool phi_alternatives_equal (basic_block, edge, edge);
133 static bool cleanup_forwarder_blocks (void);
136 /*---------------------------------------------------------------------------
137 Create basic blocks
138 ---------------------------------------------------------------------------*/
140 /* Entry point to the CFG builder for trees. TP points to the list of
141 statements to be added to the flowgraph. */
143 static void
144 build_tree_cfg (tree *tp)
146 /* Register specific tree functions. */
147 tree_register_cfg_hooks ();
149 /* Initialize the basic block array. */
150 init_flow ();
151 profile_status = PROFILE_ABSENT;
152 n_basic_blocks = 0;
153 last_basic_block = 0;
154 VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
155 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
157 /* Build a mapping of labels to their associated blocks. */
158 VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
159 "label to block map");
161 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
162 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
164 found_computed_goto = 0;
165 make_blocks (*tp);
167 /* Computed gotos are hell to deal with, especially if there are
168 lots of them with a large number of destinations. So we factor
169 them to a common computed goto location before we build the
170 edge list. After we convert back to normal form, we will un-factor
171 the computed gotos since factoring introduces an unwanted jump. */
172 if (found_computed_goto)
173 factor_computed_gotos ();
175 /* Make sure there is always at least one block, even if it's empty. */
176 if (n_basic_blocks == 0)
177 create_empty_bb (ENTRY_BLOCK_PTR);
179 create_block_annotation (ENTRY_BLOCK_PTR);
180 create_block_annotation (EXIT_BLOCK_PTR);
182 /* Adjust the size of the array. */
183 VARRAY_GROW (basic_block_info, n_basic_blocks);
185 /* To speed up statement iterator walks, we first purge dead labels. */
186 cleanup_dead_labels ();
188 /* Group case nodes to reduce the number of edges.
189 We do this after cleaning up dead labels because otherwise we miss
190 a lot of obvious case merging opportunities. */
191 group_case_labels ();
193 /* Create the edges of the flowgraph. */
194 make_edges ();
196 /* Debugging dumps. */
198 /* Write the flowgraph to a VCG file. */
200 int local_dump_flags;
201 FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
202 if (dump_file)
204 tree_cfg2vcg (dump_file);
205 dump_end (TDI_vcg, dump_file);
209 /* Dump a textual representation of the flowgraph. */
210 if (dump_file)
211 dump_tree_cfg (dump_file, dump_flags);
214 static void
215 execute_build_cfg (void)
217 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
220 struct tree_opt_pass pass_build_cfg =
222 "cfg", /* name */
223 NULL, /* gate */
224 execute_build_cfg, /* execute */
225 NULL, /* sub */
226 NULL, /* next */
227 0, /* static_pass_number */
228 TV_TREE_CFG, /* tv_id */
229 PROP_gimple_leh, /* properties_required */
230 PROP_cfg, /* properties_provided */
231 0, /* properties_destroyed */
232 0, /* todo_flags_start */
233 TODO_verify_stmts, /* todo_flags_finish */
234 0 /* letter */
237 /* Search the CFG for any computed gotos. If found, factor them to a
238 common computed goto site. Also record the location of that site so
239 that we can un-factor the gotos after we have converted back to
240 normal form. */
242 static void
243 factor_computed_gotos (void)
245 basic_block bb;
246 tree factored_label_decl = NULL;
247 tree var = NULL;
248 tree factored_computed_goto_label = NULL;
249 tree factored_computed_goto = NULL;
251 /* We know there are one or more computed gotos in this function.
252 Examine the last statement in each basic block to see if the block
253 ends with a computed goto. */
255 FOR_EACH_BB (bb)
257 block_stmt_iterator bsi = bsi_last (bb);
258 tree last;
260 if (bsi_end_p (bsi))
261 continue;
262 last = bsi_stmt (bsi);
264 /* Ignore the computed goto we create when we factor the original
265 computed gotos. */
266 if (last == factored_computed_goto)
267 continue;
269 /* If the last statement is a computed goto, factor it. */
270 if (computed_goto_p (last))
272 tree assignment;
274 /* The first time we find a computed goto we need to create
275 the factored goto block and the variable each original
276 computed goto will use for its goto destination. */
277 if (! factored_computed_goto)
279 basic_block new_bb = create_empty_bb (bb);
280 block_stmt_iterator new_bsi = bsi_start (new_bb);
282 /* Create the destination of the factored goto. Each original
283 computed goto will put its desired destination into this
284 variable and jump to the label we create immediately
285 below. */
286 var = create_tmp_var (ptr_type_node, "gotovar");
288 /* Build a label for the new block which will contain the
289 factored computed goto. */
290 factored_label_decl = create_artificial_label ();
291 factored_computed_goto_label
292 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
293 bsi_insert_after (&new_bsi, factored_computed_goto_label,
294 BSI_NEW_STMT);
296 /* Build our new computed goto. */
297 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
298 bsi_insert_after (&new_bsi, factored_computed_goto,
299 BSI_NEW_STMT);
302 /* Copy the original computed goto's destination into VAR. */
303 assignment = build (MODIFY_EXPR, ptr_type_node,
304 var, GOTO_DESTINATION (last));
305 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
307 /* And re-vector the computed goto to the new destination. */
308 GOTO_DESTINATION (last) = factored_label_decl;
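/* A minimal illustrative input (not part of GCC itself; the function and
   label names are hypothetical, and labels-as-values is a GNU C extension)
   showing the kind of code this pass rewrites.  Each "goto *EXPR" below
   would become "gotovar = EXPR; goto <factored label>;", with a single
   factored "goto *gotovar" placed in a new block.  */
static int
computed_goto_example (int op)
{
  static void *table[] = { &&op_add, &&op_sub, &&op_done };
  int acc = 0;
  goto *table[op];              /* computed goto #1 */
 op_add:
  acc += 1;
  goto *table[2];               /* computed goto #2 */
 op_sub:
  acc -= 1;
  goto *table[2];               /* computed goto #3 */
 op_done:
  return acc;
}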
314 /* Create annotations for a single basic block. */
316 static void
317 create_block_annotation (basic_block bb)
319 /* Verify that the tree_annotations field is clear. */
320 gcc_assert (!bb->tree_annotations);
321 bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
325 /* Free the annotations for all the basic blocks. */
327 static void free_blocks_annotations (void)
329 clear_blocks_annotations ();
333 /* Clear the annotations for all the basic blocks. */
335 static void
336 clear_blocks_annotations (void)
338 basic_block bb;
340 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
341 bb->tree_annotations = NULL;
345 /* Build a flowgraph for the statement_list STMT_LIST. */
347 static void
348 make_blocks (tree stmt_list)
350 tree_stmt_iterator i = tsi_start (stmt_list);
351 tree stmt = NULL;
352 bool start_new_block = true;
353 bool first_stmt_of_list = true;
354 basic_block bb = ENTRY_BLOCK_PTR;
356 while (!tsi_end_p (i))
358 tree prev_stmt;
360 prev_stmt = stmt;
361 stmt = tsi_stmt (i);
363 /* If the statement starts a new basic block or if we have determined
364 in a previous pass that we need to create a new block for STMT, do
365 so now. */
366 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
368 if (!first_stmt_of_list)
369 stmt_list = tsi_split_statement_list_before (&i);
370 bb = create_basic_block (stmt_list, NULL, bb);
371 start_new_block = false;
374 /* Now add STMT to BB and create the subgraphs for special statement
375 codes. */
376 set_bb_for_stmt (stmt, bb);
378 if (computed_goto_p (stmt))
379 found_computed_goto = true;
381 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
382 next iteration. */
383 if (stmt_ends_bb_p (stmt))
384 start_new_block = true;
386 tsi_next (&i);
387 first_stmt_of_list = false;
392 /* Create and return a new empty basic block after bb AFTER. */
394 static basic_block
395 create_bb (void *h, void *e, basic_block after)
397 basic_block bb;
399 gcc_assert (!e);
401 /* Create and initialize a new basic block. Since alloc_block uses
402 ggc_alloc_cleared to allocate a basic block, we do not have to
403 clear the newly allocated basic block here. */
404 bb = alloc_block ();
406 bb->index = last_basic_block;
407 bb->flags = BB_NEW;
408 bb->stmt_list = h ? h : alloc_stmt_list ();
410 /* Add the new block to the linked list of blocks. */
411 link_block (bb, after);
413 /* Grow the basic block array if needed. */
414 if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
416 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
417 VARRAY_GROW (basic_block_info, new_size);
420 /* Add the newly created block to the array. */
421 BASIC_BLOCK (last_basic_block) = bb;
423 create_block_annotation (bb);
425 n_basic_blocks++;
426 last_basic_block++;
428 initialize_bb_rbi (bb);
429 return bb;
433 /*---------------------------------------------------------------------------
434 Edge creation
435 ---------------------------------------------------------------------------*/
437 /* Fold COND_EXPR_COND of each COND_EXPR. */
439 static void
440 fold_cond_expr_cond (void)
442 basic_block bb;
444 FOR_EACH_BB (bb)
446 tree stmt = last_stmt (bb);
448 if (stmt
449 && TREE_CODE (stmt) == COND_EXPR)
451 tree cond = fold (COND_EXPR_COND (stmt));
452 if (integer_zerop (cond))
453 COND_EXPR_COND (stmt) = boolean_false_node;
454 else if (integer_onep (cond))
455 COND_EXPR_COND (stmt) = boolean_true_node;
460 /* Join all the blocks in the flowgraph. */
462 static void
463 make_edges (void)
465 basic_block bb;
467 /* Create an edge from entry to the first block with executable
468 statements in it. */
469 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
471 /* Traverse the basic block array placing edges. */
472 FOR_EACH_BB (bb)
474 tree first = first_stmt (bb);
475 tree last = last_stmt (bb);
477 if (first)
479 /* Edges for statements that always alter flow control. */
480 if (is_ctrl_stmt (last))
481 make_ctrl_stmt_edges (bb);
483 /* Edges for statements that sometimes alter flow control. */
484 if (is_ctrl_altering_stmt (last))
485 make_exit_edges (bb);
488 /* Finally, if no edges were created above, this is a regular
489 basic block that only needs a fallthru edge. */
490 if (EDGE_COUNT (bb->succs) == 0)
491 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
494 /* We do not care about fake edges, so remove any that the CFG
495 builder inserted for completeness. */
496 remove_fake_exit_edges ();
498 /* Fold COND_EXPR_COND of each COND_EXPR. */
499 fold_cond_expr_cond ();
501 /* Clean up the graph and warn for unreachable code. */
502 cleanup_tree_cfg ();
506 /* Create edges for control statement at basic block BB. */
508 static void
509 make_ctrl_stmt_edges (basic_block bb)
511 tree last = last_stmt (bb);
513 gcc_assert (last);
514 switch (TREE_CODE (last))
516 case GOTO_EXPR:
517 make_goto_expr_edges (bb);
518 break;
520 case RETURN_EXPR:
521 make_edge (bb, EXIT_BLOCK_PTR, 0);
522 break;
524 case COND_EXPR:
525 make_cond_expr_edges (bb);
526 break;
528 case SWITCH_EXPR:
529 make_switch_expr_edges (bb);
530 break;
532 case RESX_EXPR:
533 make_eh_edges (last);
534 /* Yet another NORETURN hack. */
535 if (EDGE_COUNT (bb->succs) == 0)
536 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
537 break;
539 default:
540 gcc_unreachable ();
545 /* Create exit edges for statements in block BB that alter the flow of
546 control. Statements that alter the control flow are 'goto', 'return'
547 and calls to non-returning functions. */
549 static void
550 make_exit_edges (basic_block bb)
552 tree last = last_stmt (bb), op;
554 gcc_assert (last);
555 switch (TREE_CODE (last))
557 case CALL_EXPR:
558 /* If this function receives a nonlocal goto, then we need to
559 make edges from this call site to all the nonlocal goto
560 handlers. */
561 if (TREE_SIDE_EFFECTS (last)
562 && current_function_has_nonlocal_label)
563 make_goto_expr_edges (bb);
565 /* If this statement has reachable exception handlers, then
566 create abnormal edges to them. */
567 make_eh_edges (last);
569 /* Some calls are known not to return. For such calls we create
570 a fake edge.
572 We really need to revamp how we build edges so that it's not
573 such a bloody pain to avoid creating edges for this case since
574 all we do is remove these edges when we're done building the
575 CFG. */
576 if (call_expr_flags (last) & ECF_NORETURN)
578 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
579 return;
582 /* Don't forget the fall-thru edge. */
583 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
584 break;
586 case MODIFY_EXPR:
587 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
588 may have an abnormal edge. Search the RHS for this case and
589 create any required edges. */
590 op = get_call_expr_in (last);
591 if (op && TREE_SIDE_EFFECTS (op)
592 && current_function_has_nonlocal_label)
593 make_goto_expr_edges (bb);
595 make_eh_edges (last);
596 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
597 break;
599 default:
600 gcc_unreachable ();
605 /* Create the edges for a COND_EXPR starting at block BB.
606 At this point, both clauses must contain only simple gotos. */
608 static void
609 make_cond_expr_edges (basic_block bb)
611 tree entry = last_stmt (bb);
612 basic_block then_bb, else_bb;
613 tree then_label, else_label;
615 gcc_assert (entry);
616 gcc_assert (TREE_CODE (entry) == COND_EXPR);
618 /* Entry basic blocks for each component. */
619 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
620 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
621 then_bb = label_to_block (then_label);
622 else_bb = label_to_block (else_label);
624 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
625 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
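/* Illustrative only (hypothetical function name): by this point the
   gimplifier has already rewritten a source-level conditional like the one
   below into a COND_EXPR whose THEN and ELSE arms are simple GOTO_EXPRs,
   so the two label_to_block lookups above are all that is needed to find
   the destination blocks.  */
static int
cond_edges_example (int a)
{
  int r;
  /* Lowered form:  if (a > 0) goto <then_label>; else goto <else_label>;  */
  if (a > 0)
    r = 1;
  else
    r = -1;
  return r;
}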
628 /* Hashing routine for EDGE_TO_CASES. */
630 static hashval_t
631 edge_to_cases_hash (const void *p)
633 edge e = ((struct edge_to_cases_elt *)p)->e;
635 /* Hash on the edge itself (which is a pointer). */
636 return htab_hash_pointer (e);
639 /* Equality routine for EDGE_TO_CASES, edges are unique, so testing
640 for equality is just a pointer comparison. */
642 static int
643 edge_to_cases_eq (const void *p1, const void *p2)
645 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
646 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
648 return e1 == e2;
651 /* Called for each element in the hash table (P) as we delete the
652 edge to cases hash table.
654 Clear all the TREE_CHAINs to prevent problems with copying of
655 SWITCH_EXPRs and structure sharing rules, then free the hash table
656 element. */
658 static void
659 edge_to_cases_cleanup (void *p)
661 struct edge_to_cases_elt *elt = p;
662 tree t, next;
664 for (t = elt->case_labels; t; t = next)
666 next = TREE_CHAIN (t);
667 TREE_CHAIN (t) = NULL;
669 free (p);
672 /* Start recording information mapping edges to case labels. */
674 static void
675 start_recording_case_labels (void)
677 gcc_assert (edge_to_cases == NULL);
679 edge_to_cases = htab_create (37,
680 edge_to_cases_hash,
681 edge_to_cases_eq,
682 edge_to_cases_cleanup);
685 /* Return nonzero if we are recording information for case labels. */
687 static bool
688 recording_case_labels_p (void)
690 return (edge_to_cases != NULL);
693 /* Stop recording information mapping edges to case labels and
694 remove any information we have recorded. */
695 static void
696 end_recording_case_labels (void)
698 htab_delete (edge_to_cases);
699 edge_to_cases = NULL;
702 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
704 static void
705 record_switch_edge (edge e, tree case_label)
707 struct edge_to_cases_elt *elt;
708 void **slot;
710 /* Build a hash table element so we can see if E is already
711 in the table. */
712 elt = xmalloc (sizeof (struct edge_to_cases_elt));
713 elt->e = e;
714 elt->case_labels = case_label;
716 slot = htab_find_slot (edge_to_cases, elt, INSERT);
718 if (*slot == NULL)
720 /* E was not in the hash table. Install E into the hash table. */
721 *slot = (void *)elt;
723 else
725 /* E was already in the hash table. Free ELT as we do not need it
726 anymore. */
727 free (elt);
729 /* Get the entry stored in the hash table. */
730 elt = (struct edge_to_cases_elt *) *slot;
732 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
733 TREE_CHAIN (case_label) = elt->case_labels;
734 elt->case_labels = case_label;
738 /* If we are inside a {start,end}_recording_cases block, then return
739 a chain of CASE_LABEL_EXPRs from T which reference E.
741 Otherwise return NULL. */
743 static tree
744 get_cases_for_edge (edge e, tree t)
746 struct edge_to_cases_elt elt, *elt_p;
747 void **slot;
748 size_t i, n;
749 tree vec;
751 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
752 chains available. Return NULL so the caller can detect this case. */
753 if (!recording_case_labels_p ())
754 return NULL;
756 restart:
757 elt.e = e;
758 elt.case_labels = NULL;
759 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
761 if (slot)
763 elt_p = (struct edge_to_cases_elt *)*slot;
764 return elt_p->case_labels;
767 /* If we did not find E in the hash table, then this must be the first
768 time we have been queried for information about E & T. Add all the
769 elements from T to the hash table then perform the query again. */
771 vec = SWITCH_LABELS (t);
772 n = TREE_VEC_LENGTH (vec);
773 for (i = 0; i < n; i++)
775 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
776 basic_block label_bb = label_to_block (lab);
777 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
779 goto restart;
782 /* Create the edges for a SWITCH_EXPR starting at block BB.
783 At this point, the switch body has been lowered and the
784 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
786 static void
787 make_switch_expr_edges (basic_block bb)
789 tree entry = last_stmt (bb);
790 size_t i, n;
791 tree vec;
793 vec = SWITCH_LABELS (entry);
794 n = TREE_VEC_LENGTH (vec);
796 for (i = 0; i < n; ++i)
798 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
799 basic_block label_bb = label_to_block (lab);
800 make_edge (bb, label_bb, 0);
805 /* Return the basic block holding label DEST. */
807 basic_block
808 label_to_block_fn (struct function *ifun, tree dest)
810 int uid = LABEL_DECL_UID (dest);
812 /* We would die hard when faced with an undefined label. Emit a label to
813 the very first basic block. This will hopefully make even the dataflow
814 and undefined variable warnings quite right. */
815 if ((errorcount || sorrycount) && uid < 0)
817 block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
818 tree stmt;
820 stmt = build1 (LABEL_EXPR, void_type_node, dest);
821 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
822 uid = LABEL_DECL_UID (dest);
824 return VARRAY_BB (ifun->cfg->x_label_to_block_map, uid);
827 /* Create edges for a goto statement at block BB. */
829 static void
830 make_goto_expr_edges (basic_block bb)
832 tree goto_t;
833 basic_block target_bb;
834 int for_call;
835 block_stmt_iterator last = bsi_last (bb);
837 goto_t = bsi_stmt (last);
839 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
840 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
841 from a nonlocal goto. */
842 if (TREE_CODE (goto_t) != GOTO_EXPR)
843 for_call = 1;
844 else
846 tree dest = GOTO_DESTINATION (goto_t);
847 for_call = 0;
849 /* A GOTO to a local label creates normal edges. */
850 if (simple_goto_p (goto_t))
852 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
853 #ifdef USE_MAPPED_LOCATION
854 e->goto_locus = EXPR_LOCATION (goto_t);
855 #else
856 e->goto_locus = EXPR_LOCUS (goto_t);
857 #endif
858 bsi_remove (&last);
859 return;
862 /* Nothing more to do for nonlocal gotos. */
863 if (TREE_CODE (dest) == LABEL_DECL)
864 return;
866 /* Computed gotos remain. */
869 /* Look for the block starting with the destination label. In the
870 case of a computed goto, make an edge to any label block we find
871 in the CFG. */
872 FOR_EACH_BB (target_bb)
874 block_stmt_iterator bsi;
876 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
878 tree target = bsi_stmt (bsi);
880 if (TREE_CODE (target) != LABEL_EXPR)
881 break;
883 if (
884 /* Computed GOTOs. Make an edge to every label block that has
885 been marked as a potential target for a computed goto. */
886 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
887 /* Nonlocal GOTO target. Make an edge to every label block
888 that has been marked as a potential target for a nonlocal
889 goto. */
890 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
892 make_edge (bb, target_bb, EDGE_ABNORMAL);
893 break;
898 /* Degenerate case of computed goto with no labels. */
899 if (!for_call && EDGE_COUNT (bb->succs) == 0)
900 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
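/* Illustrative only (GNU C nested functions and local labels; names are
   hypothetical): the goto inside "helper" jumps to a label of the
   containing function, so "out" is a DECL_NONLOCAL label and calls that
   may reach it need the abnormal edges created in the loop above.  */
static int
nonlocal_goto_example (int x)
{
  __label__ out;
  void helper (void)
  {
    if (x < 0)
      goto out;                 /* nonlocal goto */
  }
  helper ();
  return 0;
 out:
  return -1;
}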
904 /*---------------------------------------------------------------------------
905 Flowgraph analysis
906 ---------------------------------------------------------------------------*/
908 /* Remove unreachable blocks and other miscellaneous clean up work. */
910 bool
911 cleanup_tree_cfg (void)
913 bool retval = false;
915 timevar_push (TV_TREE_CLEANUP_CFG);
917 retval = cleanup_control_flow ();
918 retval |= delete_unreachable_blocks ();
920 /* cleanup_forwarder_blocks can redirect edges out of SWITCH_EXPRs,
921 which can get expensive. So we want to enable recording of edge
922 to CASE_LABEL_EXPR mappings around the call to
923 cleanup_forwarder_blocks. */
924 start_recording_case_labels ();
925 retval |= cleanup_forwarder_blocks ();
926 end_recording_case_labels ();
928 #ifdef ENABLE_CHECKING
929 if (retval)
931 gcc_assert (!cleanup_control_flow ());
932 gcc_assert (!delete_unreachable_blocks ());
933 gcc_assert (!cleanup_forwarder_blocks ());
935 #endif
937 /* Merging the blocks creates no new opportunities for the other
938 optimizations, so do it here. */
939 retval |= merge_seq_blocks ();
941 compact_blocks ();
943 #ifdef ENABLE_CHECKING
944 verify_flow_info ();
945 #endif
946 timevar_pop (TV_TREE_CLEANUP_CFG);
947 return retval;
951 /* Cleanup cfg and repair loop structures. */
953 void
954 cleanup_tree_cfg_loop (void)
956 bitmap changed_bbs = BITMAP_ALLOC (NULL);
958 cleanup_tree_cfg ();
960 fix_loop_structure (current_loops, changed_bbs);
961 calculate_dominance_info (CDI_DOMINATORS);
963 /* This usually does nothing. But sometimes parts of cfg that originally
964 were inside a loop get out of it due to edge removal (since they
965 become unreachable by back edges from latch). */
966 rewrite_into_loop_closed_ssa (changed_bbs);
968 BITMAP_FREE (changed_bbs);
970 #ifdef ENABLE_CHECKING
971 verify_loop_structure (current_loops);
972 #endif
975 /* Cleanup useless labels in basic blocks. This is something we wish
976 to do early because it allows us to group case labels before creating
977 the edges for the CFG, and it speeds up block statement iterators in
978 all passes later on.
979 We only run this pass once; running it more than once is probably not
980 profitable. */
982 /* A map from basic block index to the leading label of that block. */
983 static tree *label_for_bb;
985 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
986 static void
987 update_eh_label (struct eh_region *region)
989 tree old_label = get_eh_region_tree_label (region);
990 if (old_label)
992 tree new_label;
993 basic_block bb = label_to_block (old_label);
995 /* ??? After optimizing, there may be EH regions with labels
996 that have already been removed from the function body, so
997 there is no basic block for them. */
998 if (! bb)
999 return;
1001 new_label = label_for_bb[bb->index];
1002 set_eh_region_tree_label (region, new_label);
1006 /* Given LABEL return the first label in the same basic block. */
1007 static tree
1008 main_block_label (tree label)
1010 basic_block bb = label_to_block (label);
1012 /* label_to_block possibly inserted undefined label into the chain. */
1013 if (!label_for_bb[bb->index])
1014 label_for_bb[bb->index] = label;
1015 return label_for_bb[bb->index];
1018 /* Cleanup redundant labels. This is a three-step process:
1019 1) Find the leading label for each block.
1020 2) Redirect all references to labels to the leading labels.
1021 3) Cleanup all useless labels. */
1023 void
1024 cleanup_dead_labels (void)
1026 basic_block bb;
1027 label_for_bb = xcalloc (last_basic_block, sizeof (tree));
1029 /* Find a suitable label for each block. We use the first user-defined
1030 label if there is one, or otherwise just the first label we see. */
1031 FOR_EACH_BB (bb)
1033 block_stmt_iterator i;
1035 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
1037 tree label, stmt = bsi_stmt (i);
1039 if (TREE_CODE (stmt) != LABEL_EXPR)
1040 break;
1042 label = LABEL_EXPR_LABEL (stmt);
1044 /* If we have not yet seen a label for the current block,
1045 remember this one and see if there are more labels. */
1046 if (! label_for_bb[bb->index])
1048 label_for_bb[bb->index] = label;
1049 continue;
1052 /* If we did see a label for the current block already, but it
1053 is an artificially created label, replace it if the current
1054 label is a user defined label. */
1055 if (! DECL_ARTIFICIAL (label)
1056 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
1058 label_for_bb[bb->index] = label;
1059 break;
1064 /* Now redirect all jumps/branches to the selected label.
1065 First do so for each block ending in a control statement. */
1066 FOR_EACH_BB (bb)
1068 tree stmt = last_stmt (bb);
1069 if (!stmt)
1070 continue;
1072 switch (TREE_CODE (stmt))
1074 case COND_EXPR:
1076 tree true_branch, false_branch;
1078 true_branch = COND_EXPR_THEN (stmt);
1079 false_branch = COND_EXPR_ELSE (stmt);
1081 GOTO_DESTINATION (true_branch)
1082 = main_block_label (GOTO_DESTINATION (true_branch));
1083 GOTO_DESTINATION (false_branch)
1084 = main_block_label (GOTO_DESTINATION (false_branch));
1086 break;
1089 case SWITCH_EXPR:
1091 size_t i;
1092 tree vec = SWITCH_LABELS (stmt);
1093 size_t n = TREE_VEC_LENGTH (vec);
1095 /* Replace all destination labels. */
1096 for (i = 0; i < n; ++i)
1098 tree elt = TREE_VEC_ELT (vec, i);
1099 tree label = main_block_label (CASE_LABEL (elt));
1100 CASE_LABEL (elt) = label;
1102 break;
1105 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1106 remove them until after we've created the CFG edges. */
1107 case GOTO_EXPR:
1108 if (! computed_goto_p (stmt))
1110 GOTO_DESTINATION (stmt)
1111 = main_block_label (GOTO_DESTINATION (stmt));
1112 break;
1115 default:
1116 break;
1120 for_each_eh_region (update_eh_label);
1122 /* Finally, purge dead labels. All user-defined labels and labels that
1123 can be the target of non-local gotos are preserved. */
1124 FOR_EACH_BB (bb)
1126 block_stmt_iterator i;
1127 tree label_for_this_bb = label_for_bb[bb->index];
1129 if (! label_for_this_bb)
1130 continue;
1132 for (i = bsi_start (bb); !bsi_end_p (i); )
1134 tree label, stmt = bsi_stmt (i);
1136 if (TREE_CODE (stmt) != LABEL_EXPR)
1137 break;
1139 label = LABEL_EXPR_LABEL (stmt);
1141 if (label == label_for_this_bb
1142 || ! DECL_ARTIFICIAL (label)
1143 || DECL_NONLOCAL (label))
1144 bsi_next (&i);
1145 else
1146 bsi_remove (&i);
1150 free (label_for_bb);
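/* Illustrative only (hypothetical function name): both labels below start
   the same basic block, so step 2 redirects every reference to the block's
   leading label.  Artificial labels left unreferenced are then deleted in
   step 3; user-defined labels like these are preserved.  */
static int
dead_label_example (int x)
{
  if (x)
    goto first;
  goto second;                  /* redirected to "first" */
 first:
 second:
  return x + 1;
}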
1153 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1154 and scan the sorted vector of cases. Combine the ones jumping to the
1155 same label.
1156 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
1158 void
1159 group_case_labels (void)
1161 basic_block bb;
1163 FOR_EACH_BB (bb)
1165 tree stmt = last_stmt (bb);
1166 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1168 tree labels = SWITCH_LABELS (stmt);
1169 int old_size = TREE_VEC_LENGTH (labels);
1170 int i, j, new_size = old_size;
1171 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1172 tree default_label;
1174 /* The default label is always the last case in a switch
1175 statement after gimplification. */
1176 default_label = CASE_LABEL (default_case);
1178 /* Look for possible opportunities to merge cases.
1179 Ignore the last element of the label vector because it
1180 must be the default case. */
1181 i = 0;
1182 while (i < old_size - 1)
1184 tree base_case, base_label, base_high;
1185 base_case = TREE_VEC_ELT (labels, i);
1187 gcc_assert (base_case);
1188 base_label = CASE_LABEL (base_case);
1190 /* Discard cases that have the same destination as the
1191 default case. */
1192 if (base_label == default_label)
1194 TREE_VEC_ELT (labels, i) = NULL_TREE;
1195 i++;
1196 new_size--;
1197 continue;
1200 base_high = CASE_HIGH (base_case) ?
1201 CASE_HIGH (base_case) : CASE_LOW (base_case);
1202 i++;
1203 /* Try to merge case labels. Break out when we reach the end
1204 of the label vector or when we cannot merge the next case
1205 label with the current one. */
1206 while (i < old_size - 1)
1208 tree merge_case = TREE_VEC_ELT (labels, i);
1209 tree merge_label = CASE_LABEL (merge_case);
1210 tree t = int_const_binop (PLUS_EXPR, base_high,
1211 integer_one_node, 1);
1213 /* Merge the cases if they jump to the same place,
1214 and their ranges are consecutive. */
1215 if (merge_label == base_label
1216 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1218 base_high = CASE_HIGH (merge_case) ?
1219 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1220 CASE_HIGH (base_case) = base_high;
1221 TREE_VEC_ELT (labels, i) = NULL_TREE;
1222 new_size--;
1223 i++;
1225 else
1226 break;
1230 /* Compress the case labels in the label vector, and adjust the
1231 length of the vector. */
1232 for (i = 0, j = 0; i < new_size; i++)
1234 while (! TREE_VEC_ELT (labels, j))
1235 j++;
1236 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1238 TREE_VEC_LENGTH (labels) = new_size;
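/* Illustrative only (hypothetical function name): the three consecutive
   cases below all branch to the same code, so after this pass the label
   vector holds a single CASE_LABEL_EXPR with CASE_LOW 1 and CASE_HIGH 3,
   plus the default case.  */
static const char *
case_grouping_example (int n)
{
  switch (n)
    {
    case 1:
    case 2:
    case 3:
      return "small";
    default:
      return "other";
    }
}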
1243 /* Checks whether we can merge block B into block A. */
1245 static bool
1246 tree_can_merge_blocks_p (basic_block a, basic_block b)
1248 tree stmt;
1249 block_stmt_iterator bsi;
1251 if (!single_succ_p (a))
1252 return false;
1254 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1255 return false;
1257 if (single_succ (a) != b)
1258 return false;
1260 if (!single_pred_p (b))
1261 return false;
1263 if (b == EXIT_BLOCK_PTR)
1264 return false;
1266 /* If A ends by a statement causing exceptions or something similar, we
1267 cannot merge the blocks. */
1268 stmt = last_stmt (a);
1269 if (stmt && stmt_ends_bb_p (stmt))
1270 return false;
1272 /* Do not allow a block with only a non-local label to be merged. */
1273 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1274 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1275 return false;
1277 /* There may be no PHI nodes at the start of B. */
1278 if (phi_nodes (b))
1279 return false;
1281 /* Do not remove user labels. */
1282 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1284 stmt = bsi_stmt (bsi);
1285 if (TREE_CODE (stmt) != LABEL_EXPR)
1286 break;
1287 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1288 return false;
1291 /* Protect the loop latches. */
1292 if (current_loops
1293 && b->loop_father->latch == b)
1294 return false;
1296 return true;
1300 /* Merge block B into block A. */
1302 static void
1303 tree_merge_blocks (basic_block a, basic_block b)
1305 block_stmt_iterator bsi;
1306 tree_stmt_iterator last;
1308 if (dump_file)
1309 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1311 /* Ensure that B follows A. */
1312 move_block_after (b, a);
1314 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1315 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1317 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1318 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1320 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1322 tree label = bsi_stmt (bsi);
1324 bsi_remove (&bsi);
1325 /* Now that we can thread computed gotos, we might have
1326 a situation where we have a forced label in block B.
1327 However, the label at the start of block B might still be
1328 used in other ways (think about the runtime checking for
1329 Fortran assigned gotos). So we can not just delete the
1330 label. Instead we move the label to the start of block A. */
1331 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1333 block_stmt_iterator dest_bsi = bsi_start (a);
1334 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1337 else
1339 set_bb_for_stmt (bsi_stmt (bsi), a);
1340 bsi_next (&bsi);
1344 /* Merge the chains. */
1345 last = tsi_last (a->stmt_list);
1346 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1347 b->stmt_list = NULL;
1351 /* Walk the function tree removing unnecessary statements.
1353 * Empty statement nodes are removed
1355 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1357 * Unnecessary COND_EXPRs are removed
1359 * Some unnecessary BIND_EXPRs are removed
1361 Clearly more work could be done. The trick is doing the analysis
1362 and removal fast enough to be a net improvement in compile times.
1364 Note that when we remove a control structure such as a COND_EXPR,
1365 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1366 to ensure we eliminate all the useless code. */
1368 struct rus_data
1370 tree *last_goto;
1371 bool repeat;
1372 bool may_throw;
1373 bool may_branch;
1374 bool has_label;
1377 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1379 static bool
1380 remove_useless_stmts_warn_notreached (tree stmt)
1382 if (EXPR_HAS_LOCATION (stmt))
1384 location_t loc = EXPR_LOCATION (stmt);
1385 if (LOCATION_LINE (loc) > 0)
1387 warning ("%Hwill never be executed", &loc);
1388 return true;
1392 switch (TREE_CODE (stmt))
1394 case STATEMENT_LIST:
1396 tree_stmt_iterator i;
1397 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1398 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1399 return true;
1401 break;
1403 case COND_EXPR:
1404 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1405 return true;
1406 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1407 return true;
1408 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1409 return true;
1410 break;
1412 case TRY_FINALLY_EXPR:
1413 case TRY_CATCH_EXPR:
1414 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1415 return true;
1416 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1417 return true;
1418 break;
1420 case CATCH_EXPR:
1421 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1422 case EH_FILTER_EXPR:
1423 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1424 case BIND_EXPR:
1425 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1427 default:
1428 /* Not a live container. */
1429 break;
1432 return false;
1435 static void
1436 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1438 tree then_clause, else_clause, cond;
1439 bool save_has_label, then_has_label, else_has_label;
1441 save_has_label = data->has_label;
1442 data->has_label = false;
1443 data->last_goto = NULL;
1445 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1447 then_has_label = data->has_label;
1448 data->has_label = false;
1449 data->last_goto = NULL;
1451 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1453 else_has_label = data->has_label;
1454 data->has_label = save_has_label | then_has_label | else_has_label;
1456 then_clause = COND_EXPR_THEN (*stmt_p);
1457 else_clause = COND_EXPR_ELSE (*stmt_p);
1458 cond = fold (COND_EXPR_COND (*stmt_p));
1460 /* If neither arm does anything at all, we can remove the whole IF. */
1461 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1463 *stmt_p = build_empty_stmt ();
1464 data->repeat = true;
1467 /* If there are no reachable statements in an arm, then we can
1468 zap the entire conditional. */
1469 else if (integer_nonzerop (cond) && !else_has_label)
1471 if (warn_notreached)
1472 remove_useless_stmts_warn_notreached (else_clause);
1473 *stmt_p = then_clause;
1474 data->repeat = true;
1476 else if (integer_zerop (cond) && !then_has_label)
1478 if (warn_notreached)
1479 remove_useless_stmts_warn_notreached (then_clause);
1480 *stmt_p = else_clause;
1481 data->repeat = true;
1484 /* Check a couple of simple things on then/else with single stmts. */
1485 else
1487 tree then_stmt = expr_only (then_clause);
1488 tree else_stmt = expr_only (else_clause);
1490 /* Notice branches to a common destination. */
1491 if (then_stmt && else_stmt
1492 && TREE_CODE (then_stmt) == GOTO_EXPR
1493 && TREE_CODE (else_stmt) == GOTO_EXPR
1494 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1496 *stmt_p = then_stmt;
1497 data->repeat = true;
1500 /* If the THEN/ELSE clause merely assigns a value to a variable or
1501 parameter which is already known to contain that value, then
1502 remove the useless THEN/ELSE clause. */
1503 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1505 if (else_stmt
1506 && TREE_CODE (else_stmt) == MODIFY_EXPR
1507 && TREE_OPERAND (else_stmt, 0) == cond
1508 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1509 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1511 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1512 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1513 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1514 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1516 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1517 ? then_stmt : else_stmt);
1518 tree *location = (TREE_CODE (cond) == EQ_EXPR
1519 ? &COND_EXPR_THEN (*stmt_p)
1520 : &COND_EXPR_ELSE (*stmt_p));
1522 if (stmt
1523 && TREE_CODE (stmt) == MODIFY_EXPR
1524 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1525 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1526 *location = alloc_stmt_list ();
1530 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1531 would be re-introduced during lowering. */
1532 data->last_goto = NULL;
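/* Illustrative only (hypothetical function name): the first IF has no side
   effects in either arm and is removed outright; in the second, the THEN
   clause merely stores the value the EQ_EXPR condition already guarantees,
   so that clause is replaced with an empty statement list.  */
static int
useless_cond_example (int x)
{
  if (x > 0)
    ;                           /* both arms empty -> whole COND_EXPR dropped */
  if (x == 0)
    x = 0;                      /* re-assigns the known value -> arm emptied */
  return x;
}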
1536 static void
1537 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1539 bool save_may_branch, save_may_throw;
1540 bool this_may_branch, this_may_throw;
1542 /* Collect may_branch and may_throw information for the body only. */
1543 save_may_branch = data->may_branch;
1544 save_may_throw = data->may_throw;
1545 data->may_branch = false;
1546 data->may_throw = false;
1547 data->last_goto = NULL;
1549 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1551 this_may_branch = data->may_branch;
1552 this_may_throw = data->may_throw;
1553 data->may_branch |= save_may_branch;
1554 data->may_throw |= save_may_throw;
1555 data->last_goto = NULL;
1557 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1559 /* If the body is empty, then we can emit the FINALLY block without
1560 the enclosing TRY_FINALLY_EXPR. */
1561 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1563 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1564 data->repeat = true;
1567 /* If the handler is empty, then we can emit the TRY block without
1568 the enclosing TRY_FINALLY_EXPR. */
1569 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1571 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1572 data->repeat = true;
1575 /* If the body neither throws, nor branches, then we can safely
1576 string the TRY and FINALLY blocks together. */
1577 else if (!this_may_branch && !this_may_throw)
1579 tree stmt = *stmt_p;
1580 *stmt_p = TREE_OPERAND (stmt, 0);
1581 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1582 data->repeat = true;
1587 static void
1588 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1590 bool save_may_throw, this_may_throw;
1591 tree_stmt_iterator i;
1592 tree stmt;
1594 /* Collect may_throw information for the body only. */
1595 save_may_throw = data->may_throw;
1596 data->may_throw = false;
1597 data->last_goto = NULL;
1599 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1601 this_may_throw = data->may_throw;
1602 data->may_throw = save_may_throw;
1604 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1605 if (!this_may_throw)
1607 if (warn_notreached)
1608 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1609 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1610 data->repeat = true;
1611 return;
1614 /* Process the catch clause specially. We may be able to tell that
1615 no exceptions propagate past this point. */
1617 this_may_throw = true;
1618 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1619 stmt = tsi_stmt (i);
1620 data->last_goto = NULL;
1622 switch (TREE_CODE (stmt))
1624 case CATCH_EXPR:
1625 for (; !tsi_end_p (i); tsi_next (&i))
1627 stmt = tsi_stmt (i);
1628 /* If we catch all exceptions, then the body does not
1629 propagate exceptions past this point. */
1630 if (CATCH_TYPES (stmt) == NULL)
1631 this_may_throw = false;
1632 data->last_goto = NULL;
1633 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1635 break;
1637 case EH_FILTER_EXPR:
1638 if (EH_FILTER_MUST_NOT_THROW (stmt))
1639 this_may_throw = false;
1640 else if (EH_FILTER_TYPES (stmt) == NULL)
1641 this_may_throw = false;
1642 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1643 break;
1645 default:
1646 /* Otherwise this is a cleanup. */
1647 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1649 /* If the cleanup is empty, then we can emit the TRY block without
1650 the enclosing TRY_CATCH_EXPR. */
1651 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1653 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1654 data->repeat = true;
1656 break;
1658 data->may_throw |= this_may_throw;
1662 static void
1663 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1665 tree block;
1667 /* First remove anything underneath the BIND_EXPR. */
1668 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1670 /* If the BIND_EXPR has no variables, then we can pull everything
1671 up one level and remove the BIND_EXPR, unless this is the toplevel
1672 BIND_EXPR for the current function or an inlined function.
1674 When this situation occurs we will want to apply this
1675 optimization again. */
1676 block = BIND_EXPR_BLOCK (*stmt_p);
1677 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1678 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1679 && (! block
1680 || ! BLOCK_ABSTRACT_ORIGIN (block)
1681 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1682 != FUNCTION_DECL)))
1684 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1685 data->repeat = true;
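/* Illustrative only (hypothetical function name): the inner braces can
   produce a BIND_EXPR with no BIND_EXPR_VARS, in which case its body is
   spliced into the surrounding statement list and the BIND_EXPR itself
   disappears.  */
static int
empty_bind_example (int x)
{
  {
    x += 1;                     /* no local declarations in this scope */
  }
  return x;
}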
1690 static void
1691 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1693 tree dest = GOTO_DESTINATION (*stmt_p);
1695 data->may_branch = true;
1696 data->last_goto = NULL;
1698 /* Record the last goto expr, so that we can delete it if unnecessary. */
1699 if (TREE_CODE (dest) == LABEL_DECL)
1700 data->last_goto = stmt_p;
1704 static void
1705 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1707 tree label = LABEL_EXPR_LABEL (*stmt_p);
1709 data->has_label = true;
1711 /* We do want to jump across non-local label receiver code. */
1712 if (DECL_NONLOCAL (label))
1713 data->last_goto = NULL;
1715 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1717 *data->last_goto = build_empty_stmt ();
1718 data->repeat = true;
1721 /* ??? Add something here to delete unused labels. */
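/* Illustrative only (hypothetical function name): the goto below jumps to
   the label that immediately follows it, so the label handler above sees
   GOTO_DESTINATION (*data->last_goto) == label and replaces the goto with
   an empty statement.  */
static int
useless_goto_example (int x)
{
  goto done;                    /* becomes an empty statement */
 done:
  return x;
}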
1725 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1726 decl. This allows us to eliminate redundant or useless
1727 calls to "const" functions.
1729 The gimplifier already does the same operation, but we may notice functions
1730 becoming const or pure only after their calls have been gimplified, so we need
1731 to update the flag. */
1733 static void
1734 update_call_expr_flags (tree call)
1736 tree decl = get_callee_fndecl (call);
1737 if (!decl)
1738 return;
1739 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1740 TREE_SIDE_EFFECTS (call) = 0;
1741 if (TREE_NOTHROW (decl))
1742 TREE_NOTHROW (call) = 1;
1746 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1748 void
1749 notice_special_calls (tree t)
1751 int flags = call_expr_flags (t);
1753 if (flags & ECF_MAY_BE_ALLOCA)
1754 current_function_calls_alloca = true;
1755 if (flags & ECF_RETURNS_TWICE)
1756 current_function_calls_setjmp = true;
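/* Illustrative caller (not GCC sources; names hypothetical, and the
   include is only so the stand-alone example compiles): these are the
   kinds of calls notice_special_calls looks for, an alloca-style
   allocation (ECF_MAY_BE_ALLOCA) and a setjmp-style returns-twice call
   (ECF_RETURNS_TWICE).  */
#include <setjmp.h>
static jmp_buf special_calls_env;
static int
special_calls_example (unsigned n)
{
  char *buf = __builtin_alloca (n);     /* sets current_function_calls_alloca */
  buf[0] = 0;
  return setjmp (special_calls_env);    /* sets current_function_calls_setjmp */
}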
1760 /* Clear flags set by notice_special_calls. Used by dead code removal
1761 to update the flags. */
1763 void
1764 clear_special_calls (void)
1766 current_function_calls_alloca = false;
1767 current_function_calls_setjmp = false;
1771 static void
1772 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1774 tree t = *tp, op;
1776 switch (TREE_CODE (t))
1778 case COND_EXPR:
1779 remove_useless_stmts_cond (tp, data);
1780 break;
1782 case TRY_FINALLY_EXPR:
1783 remove_useless_stmts_tf (tp, data);
1784 break;
1786 case TRY_CATCH_EXPR:
1787 remove_useless_stmts_tc (tp, data);
1788 break;
1790 case BIND_EXPR:
1791 remove_useless_stmts_bind (tp, data);
1792 break;
1794 case GOTO_EXPR:
1795 remove_useless_stmts_goto (tp, data);
1796 break;
1798 case LABEL_EXPR:
1799 remove_useless_stmts_label (tp, data);
1800 break;
1802 case RETURN_EXPR:
1803 fold_stmt (tp);
1804 data->last_goto = NULL;
1805 data->may_branch = true;
1806 break;
1808 case CALL_EXPR:
1809 fold_stmt (tp);
1810 data->last_goto = NULL;
1811 notice_special_calls (t);
1812 update_call_expr_flags (t);
1813 if (tree_could_throw_p (t))
1814 data->may_throw = true;
1815 break;
1817 case MODIFY_EXPR:
1818 data->last_goto = NULL;
1819 fold_stmt (tp);
1820 op = get_call_expr_in (t);
1821 if (op)
1823 update_call_expr_flags (op);
1824 notice_special_calls (op);
1826 if (tree_could_throw_p (t))
1827 data->may_throw = true;
1828 break;
1830 case STATEMENT_LIST:
1832 tree_stmt_iterator i = tsi_start (t);
1833 while (!tsi_end_p (i))
1835 t = tsi_stmt (i);
1836 if (IS_EMPTY_STMT (t))
1838 tsi_delink (&i);
1839 continue;
1842 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1844 t = tsi_stmt (i);
1845 if (TREE_CODE (t) == STATEMENT_LIST)
1847 tsi_link_before (&i, t, TSI_SAME_STMT);
1848 tsi_delink (&i);
1850 else
1851 tsi_next (&i);
1854 break;
1855 case ASM_EXPR:
1856 fold_stmt (tp);
1857 data->last_goto = NULL;
1858 break;
1860 default:
1861 data->last_goto = NULL;
1862 break;
1866 static void
1867 remove_useless_stmts (void)
1869 struct rus_data data;
1871 clear_special_calls ();
1875 memset (&data, 0, sizeof (data));
1876 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1878 while (data.repeat);
1882 struct tree_opt_pass pass_remove_useless_stmts =
1884 "useless", /* name */
1885 NULL, /* gate */
1886 remove_useless_stmts, /* execute */
1887 NULL, /* sub */
1888 NULL, /* next */
1889 0, /* static_pass_number */
1890 0, /* tv_id */
1891 PROP_gimple_any, /* properties_required */
1892 0, /* properties_provided */
1893 0, /* properties_destroyed */
1894 0, /* todo_flags_start */
1895 TODO_dump_func, /* todo_flags_finish */
1896 0 /* letter */
1899 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1901 static void
1902 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1904 tree phi;
1906 /* Since this block is no longer reachable, we can just delete all
1907 of its PHI nodes. */
1908 phi = phi_nodes (bb);
1909 while (phi)
1911 tree next = PHI_CHAIN (phi);
1912 remove_phi_node (phi, NULL_TREE);
1913 phi = next;
1916 /* Remove edges to BB's successors. */
1917 while (EDGE_COUNT (bb->succs) > 0)
1918 remove_edge (EDGE_SUCC (bb, 0));
1922 /* Remove statements of basic block BB. */
1924 static void
1925 remove_bb (basic_block bb)
1927 block_stmt_iterator i;
1928 #ifdef USE_MAPPED_LOCATION
1929 source_location loc = UNKNOWN_LOCATION;
1930 #else
1931 source_locus loc = 0;
1932 #endif
1934 if (dump_file)
1936 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1937 if (dump_flags & TDF_DETAILS)
1939 dump_bb (bb, dump_file, 0);
1940 fprintf (dump_file, "\n");
1944 /* If we remove the header or the latch of a loop, mark the loop for
1945 removal by setting its header and latch to NULL. */
1946 if (current_loops)
1948 struct loop *loop = bb->loop_father;
1950 if (loop->latch == bb
1951 || loop->header == bb)
1953 loop->latch = NULL;
1954 loop->header = NULL;
1958 /* Remove all the instructions in the block. */
1959 for (i = bsi_start (bb); !bsi_end_p (i);)
1961 tree stmt = bsi_stmt (i);
1962 if (TREE_CODE (stmt) == LABEL_EXPR
1963 && FORCED_LABEL (LABEL_EXPR_LABEL (stmt)))
1965 basic_block new_bb = bb->prev_bb;
1966 block_stmt_iterator new_bsi = bsi_start (new_bb);
1968 bsi_remove (&i);
1969 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
1971 else
1973 release_defs (stmt);
1975 set_bb_for_stmt (stmt, NULL);
1976 bsi_remove (&i);
1979 /* Don't warn for removed gotos. Gotos are often removed due to
1980 jump threading, thus resulting in bogus warnings. Not great,
1981 since this way we lose warnings for gotos in the original
1982 program that are indeed unreachable. */
1983 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
1985 #ifdef USE_MAPPED_LOCATION
1986 if (EXPR_HAS_LOCATION (stmt))
1987 loc = EXPR_LOCATION (stmt);
1988 #else
1989 source_locus t;
1990 t = EXPR_LOCUS (stmt);
1991 if (t && LOCATION_LINE (*t) > 0)
1992 loc = t;
1993 #endif
1997 /* If requested, give a warning that the first statement in the
1998 block is unreachable. We walk statements backwards in the
1999 loop above, so the last statement we process is the first statement
2000 in the block. */
2001 #ifdef USE_MAPPED_LOCATION
2002 if (warn_notreached && loc > BUILTINS_LOCATION)
2003 warning ("%Hwill never be executed", &loc);
2004 #else
2005 if (warn_notreached && loc)
2006 warning ("%Hwill never be executed", loc);
2007 #endif
2009 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2012 /* A list of all the noreturn calls passed to modify_stmt.
2013 cleanup_control_flow uses it to detect cases where a mid-block
2014 indirect call has been turned into a noreturn call. When this
2015 happens, all the instructions after the call are no longer
2016 reachable and must be deleted as dead. */
2018 VEC(tree) *modified_noreturn_calls;
2020 /* Try to remove superfluous control structures. */
2022 static bool
2023 cleanup_control_flow (void)
2025 basic_block bb;
2026 block_stmt_iterator bsi;
2027 bool retval = false;
2028 tree stmt;
2030 /* Detect cases where a mid-block call is now known not to return. */
2031 while (VEC_length (tree, modified_noreturn_calls))
2033 stmt = VEC_pop (tree, modified_noreturn_calls);
2034 bb = bb_for_stmt (stmt);
2035 if (bb != NULL && last_stmt (bb) != stmt && noreturn_call_p (stmt))
2036 split_block (bb, stmt);
2039 FOR_EACH_BB (bb)
2041 bsi = bsi_last (bb);
2043 if (bsi_end_p (bsi))
2044 continue;
2046 stmt = bsi_stmt (bsi);
2047 if (TREE_CODE (stmt) == COND_EXPR
2048 || TREE_CODE (stmt) == SWITCH_EXPR)
2049 retval |= cleanup_control_expr_graph (bb, bsi);
2051 /* If we had a computed goto which has a compile-time determinable
2052 destination, then we can eliminate the goto. */
2053 if (TREE_CODE (stmt) == GOTO_EXPR
2054 && TREE_CODE (GOTO_DESTINATION (stmt)) == ADDR_EXPR
2055 && TREE_CODE (TREE_OPERAND (GOTO_DESTINATION (stmt), 0)) == LABEL_DECL)
2057 edge e;
2058 tree label;
2059 edge_iterator ei;
2060 basic_block target_block;
2061 bool removed_edge = false;
2063 /* First look at all the outgoing edges. Delete any outgoing
2064 edges which do not go to the right block. For the one
2065 edge which goes to the right block, fix up its flags. */
2066 label = TREE_OPERAND (GOTO_DESTINATION (stmt), 0);
2067 target_block = label_to_block (label);
2068 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2070 if (e->dest != target_block)
2072 removed_edge = true;
2073 remove_edge (e);
2075 else
2077 /* Turn off the EDGE_ABNORMAL flag. */
2078 e->flags &= ~EDGE_ABNORMAL;
2080 /* And set EDGE_FALLTHRU. */
2081 e->flags |= EDGE_FALLTHRU;
2082 ei_next (&ei);
2086 /* If we removed one or more edges, then we will need to fix the
2087 dominators. It may be possible to incrementally update them. */
2088 if (removed_edge)
2089 free_dominance_info (CDI_DOMINATORS);
2091 /* Remove the GOTO_EXPR as it is not needed. The CFG has all the
2092 relevant information we need. */
2093 bsi_remove (&bsi);
2094 retval = true;
2097 /* Check for indirect calls that have been turned into
2098 noreturn calls. */
2099 if (noreturn_call_p (stmt) && remove_fallthru_edge (bb->succs))
2101 free_dominance_info (CDI_DOMINATORS);
2102 retval = true;
2105 return retval;
2109 /* Disconnect an unreachable block in the control expression starting
2110 at block BB. */
2112 static bool
2113 cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
2115 edge taken_edge;
2116 bool retval = false;
2117 tree expr = bsi_stmt (bsi), val;
2119 if (!single_succ_p (bb))
2121 edge e;
2122 edge_iterator ei;
2124 switch (TREE_CODE (expr))
2126 case COND_EXPR:
2127 val = COND_EXPR_COND (expr);
2128 break;
2130 case SWITCH_EXPR:
2131 val = SWITCH_COND (expr);
2132 if (TREE_CODE (val) != INTEGER_CST)
2133 return false;
2134 break;
2136 default:
2137 gcc_unreachable ();
2140 taken_edge = find_taken_edge (bb, val);
2141 if (!taken_edge)
2142 return false;
2144 /* Remove all the edges except the one that is always executed. */
2145 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2147 if (e != taken_edge)
2149 taken_edge->probability += e->probability;
2150 taken_edge->count += e->count;
2151 remove_edge (e);
2152 retval = true;
2154 else
2155 ei_next (&ei);
2157 if (taken_edge->probability > REG_BR_PROB_BASE)
2158 taken_edge->probability = REG_BR_PROB_BASE;
2160 else
2161 taken_edge = single_succ_edge (bb);
2163 bsi_remove (&bsi);
2164 taken_edge->flags = EDGE_FALLTHRU;
2166 /* We removed some paths from the cfg. */
2167 free_dominance_info (CDI_DOMINATORS);
2169 return retval;
2172 /* Remove any fallthru edge from EV. Return true if an edge was removed. */
2174 static bool
2175 remove_fallthru_edge (VEC(edge) *ev)
2177 edge_iterator ei;
2178 edge e;
2180 FOR_EACH_EDGE (e, ei, ev)
2181 if ((e->flags & EDGE_FALLTHRU) != 0)
2183 remove_edge (e);
2184 return true;
2186 return false;
2189 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2190 predicate VAL, return the edge that will be taken out of the block.
2191 If VAL does not match a unique edge, NULL is returned. */
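/* A sketch of a typical use: if the COND_EXPR ending BB has had its
   predicate folded to the constant 0, find_taken_edge (bb, val) returns
   the EDGE_FALSE_VALUE successor, and cleanup_control_expr_graph above
   then removes every other outgoing edge. */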
2193 edge
2194 find_taken_edge (basic_block bb, tree val)
2196 tree stmt;
2198 stmt = last_stmt (bb);
2200 gcc_assert (stmt);
2201 gcc_assert (is_ctrl_stmt (stmt));
2202 gcc_assert (val);
2204 if (! is_gimple_min_invariant (val))
2205 return NULL;
2207 if (TREE_CODE (stmt) == COND_EXPR)
2208 return find_taken_edge_cond_expr (bb, val);
2210 if (TREE_CODE (stmt) == SWITCH_EXPR)
2211 return find_taken_edge_switch_expr (bb, val);
2213 if (computed_goto_p (stmt))
2214 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2216 gcc_unreachable ();
2219 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2220 statement, determine which of the outgoing edges will be taken out of the
2221 block. Return NULL if any edge may be taken. */
2223 static edge
2224 find_taken_edge_computed_goto (basic_block bb, tree val)
2226 basic_block dest;
2227 edge e = NULL;
2229 dest = label_to_block (val);
2230 if (dest)
2232 e = find_edge (bb, dest);
2233 gcc_assert (e != NULL);
2236 return e;
2239 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2240 statement, determine which of the two edges will be taken out of the
2241 block. Return NULL if either edge may be taken. */
2243 static edge
2244 find_taken_edge_cond_expr (basic_block bb, tree val)
2246 edge true_edge, false_edge;
2248 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2250 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2251 return (zero_p (val) ? false_edge : true_edge);
2254 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2255 statement, determine which edge will be taken out of the block. Return
2256 NULL if any edge may be taken. */
2258 static edge
2259 find_taken_edge_switch_expr (basic_block bb, tree val)
2261 tree switch_expr, taken_case;
2262 basic_block dest_bb;
2263 edge e;
2265 switch_expr = last_stmt (bb);
2266 taken_case = find_case_label_for_value (switch_expr, val);
2267 dest_bb = label_to_block (CASE_LABEL (taken_case));
2269 e = find_edge (bb, dest_bb);
2270 gcc_assert (e);
2271 return e;
2275 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2276 We can make optimal use here of the fact that the case labels are
2277 sorted: We can do a binary search for a case matching VAL. */
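/* For instance (an illustrative sketch, not taken from a real dump):
   with a sorted label vector { case 1:, case 5 ... 9:, default: } and
   VAL == 7, the search narrows LOW/HIGH onto the range label
   "case 5 ... 9:" because CASE_LOW (5) <= 7 and CASE_HIGH (9) >= 7.
   A value matched by no label falls back to the default case, which is
   always the last element of the vector. */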
2279 static tree
2280 find_case_label_for_value (tree switch_expr, tree val)
2282 tree vec = SWITCH_LABELS (switch_expr);
2283 size_t low, high, n = TREE_VEC_LENGTH (vec);
2284 tree default_case = TREE_VEC_ELT (vec, n - 1);
2286 for (low = -1, high = n - 1; high - low > 1; )
2288 size_t i = (high + low) / 2;
2289 tree t = TREE_VEC_ELT (vec, i);
2290 int cmp;
2292 /* Cache the result of comparing CASE_LOW and val. */
2293 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2295 if (cmp > 0)
2296 high = i;
2297 else
2298 low = i;
2300 if (CASE_HIGH (t) == NULL)
2302 /* A single-valued case label. */
2303 if (cmp == 0)
2304 return t;
2306 else
2308 /* A case range. We can only handle integer ranges. */
2309 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2310 return t;
2314 return default_case;
2318 /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
2319 those alternatives are equal in each of the PHI nodes, then return
2320 true, else return false. */
2322 static bool
2323 phi_alternatives_equal (basic_block dest, edge e1, edge e2)
2325 int n1 = e1->dest_idx;
2326 int n2 = e2->dest_idx;
2327 tree phi;
2329 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
2331 tree val1 = PHI_ARG_DEF (phi, n1);
2332 tree val2 = PHI_ARG_DEF (phi, n2);
2334 gcc_assert (val1 != NULL_TREE);
2335 gcc_assert (val2 != NULL_TREE);
2337 if (!operand_equal_for_phi_arg_p (val1, val2))
2338 return false;
2341 return true;
2345 /*---------------------------------------------------------------------------
2346 Debugging functions
2347 ---------------------------------------------------------------------------*/
2349 /* Dump tree-specific information of block BB to file OUTF. */
2351 void
2352 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2354 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2358 /* Dump a basic block on stderr. */
2360 void
2361 debug_tree_bb (basic_block bb)
2363 dump_bb (bb, stderr, 0);
2367 /* Dump basic block with index N on stderr. */
2369 basic_block
2370 debug_tree_bb_n (int n)
2372 debug_tree_bb (BASIC_BLOCK (n));
2373 return BASIC_BLOCK (n);
2377 /* Dump the CFG on stderr.
2379 FLAGS are the same used by the tree dumping functions
2380 (see TDF_* in tree.h). */
2382 void
2383 debug_tree_cfg (int flags)
2385 dump_tree_cfg (stderr, flags);
2389 /* Dump the program showing basic block boundaries on the given FILE.
2391 FLAGS are the same used by the tree dumping functions (see TDF_* in
2392 tree.h). */
2394 void
2395 dump_tree_cfg (FILE *file, int flags)
2397 if (flags & TDF_DETAILS)
2399 const char *funcname
2400 = lang_hooks.decl_printable_name (current_function_decl, 2);
2402 fputc ('\n', file);
2403 fprintf (file, ";; Function %s\n\n", funcname);
2404 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2405 n_basic_blocks, n_edges, last_basic_block);
2407 brief_dump_cfg (file);
2408 fprintf (file, "\n");
2411 if (flags & TDF_STATS)
2412 dump_cfg_stats (file);
2414 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2418 /* Dump CFG statistics on FILE. */
2420 void
2421 dump_cfg_stats (FILE *file)
2423 static long max_num_merged_labels = 0;
2424 unsigned long size, total = 0;
2425 long num_edges;
2426 basic_block bb;
2427 const char * const fmt_str = "%-30s%-13s%12s\n";
2428 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2429 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2430 const char *funcname
2431 = lang_hooks.decl_printable_name (current_function_decl, 2);
2434 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2436 fprintf (file, "---------------------------------------------------------\n");
2437 fprintf (file, fmt_str, "", " Number of ", "Memory");
2438 fprintf (file, fmt_str, "", " instances ", "used ");
2439 fprintf (file, "---------------------------------------------------------\n");
2441 size = n_basic_blocks * sizeof (struct basic_block_def);
2442 total += size;
2443 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2444 SCALE (size), LABEL (size));
2446 num_edges = 0;
2447 FOR_EACH_BB (bb)
2448 num_edges += EDGE_COUNT (bb->succs);
2449 size = num_edges * sizeof (struct edge_def);
2450 total += size;
2451 fprintf (file, fmt_str_1, "Edges", num_edges, SCALE (size), LABEL (size));
2453 size = n_basic_blocks * sizeof (struct bb_ann_d);
2454 total += size;
2455 fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks,
2456 SCALE (size), LABEL (size));
2458 fprintf (file, "---------------------------------------------------------\n");
2459 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2460 LABEL (total));
2461 fprintf (file, "---------------------------------------------------------\n");
2462 fprintf (file, "\n");
2464 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2465 max_num_merged_labels = cfg_stats.num_merged_labels;
2467 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2468 cfg_stats.num_merged_labels, max_num_merged_labels);
2470 fprintf (file, "\n");
2474 /* Dump CFG statistics on stderr. Keep extern so that it's always
2475 linked in the final executable. */
2477 void
2478 debug_cfg_stats (void)
2480 dump_cfg_stats (stderr);
2484 /* Dump the flowgraph to a .vcg FILE. */
2486 static void
2487 tree_cfg2vcg (FILE *file)
2489 edge e;
2490 edge_iterator ei;
2491 basic_block bb;
2492 const char *funcname
2493 = lang_hooks.decl_printable_name (current_function_decl, 2);
2495 /* Write the file header. */
2496 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2497 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2498 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2500 /* Write blocks and edges. */
2501 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2503 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2504 e->dest->index);
2506 if (e->flags & EDGE_FAKE)
2507 fprintf (file, " linestyle: dotted priority: 10");
2508 else
2509 fprintf (file, " linestyle: solid priority: 100");
2511 fprintf (file, " }\n");
2513 fputc ('\n', file);
2515 FOR_EACH_BB (bb)
2517 enum tree_code head_code, end_code;
2518 const char *head_name, *end_name;
2519 int head_line = 0;
2520 int end_line = 0;
2521 tree first = first_stmt (bb);
2522 tree last = last_stmt (bb);
2524 if (first)
2526 head_code = TREE_CODE (first);
2527 head_name = tree_code_name[head_code];
2528 head_line = get_lineno (first);
2530 else
2531 head_name = "no-statement";
2533 if (last)
2535 end_code = TREE_CODE (last);
2536 end_name = tree_code_name[end_code];
2537 end_line = get_lineno (last);
2539 else
2540 end_name = "no-statement";
2542 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2543 bb->index, bb->index, head_name, head_line, end_name,
2544 end_line);
2546 FOR_EACH_EDGE (e, ei, bb->succs)
2548 if (e->dest == EXIT_BLOCK_PTR)
2549 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2550 else
2551 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2553 if (e->flags & EDGE_FAKE)
2554 fprintf (file, " priority: 10 linestyle: dotted");
2555 else
2556 fprintf (file, " priority: 100 linestyle: solid");
2558 fprintf (file, " }\n");
2561 if (bb->next_bb != EXIT_BLOCK_PTR)
2562 fputc ('\n', file);
2565 fputs ("}\n\n", file);
2570 /*---------------------------------------------------------------------------
2571 Miscellaneous helpers
2572 ---------------------------------------------------------------------------*/
2574 /* Return true if T represents a stmt that always transfers control. */
2576 bool
2577 is_ctrl_stmt (tree t)
2579 return (TREE_CODE (t) == COND_EXPR
2580 || TREE_CODE (t) == SWITCH_EXPR
2581 || TREE_CODE (t) == GOTO_EXPR
2582 || TREE_CODE (t) == RETURN_EXPR
2583 || TREE_CODE (t) == RESX_EXPR);
2587 /* Return true if T is a statement that may alter the flow of control
2588 (e.g., a call to a non-returning function). */
2590 bool
2591 is_ctrl_altering_stmt (tree t)
2593 tree call;
2595 gcc_assert (t);
2596 call = get_call_expr_in (t);
2597 if (call)
2599 /* A non-pure/const CALL_EXPR alters flow control if the current
2600 function has nonlocal labels. */
2601 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2602 return true;
2604 /* A CALL_EXPR also alters control flow if it does not return. */
2605 if (call_expr_flags (call) & ECF_NORETURN)
2606 return true;
2609 /* If a statement can throw, it alters control flow. */
2610 return tree_can_throw_internal (t);
2614 /* Return true if T is a computed goto. */
2616 bool
2617 computed_goto_p (tree t)
2619 return (TREE_CODE (t) == GOTO_EXPR
2620 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2624 /* Checks whether EXPR is a simple local goto. */
2626 bool
2627 simple_goto_p (tree expr)
2629 return (TREE_CODE (expr) == GOTO_EXPR
2630 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2634 /* Return true if T should start a new basic block. PREV_T is the
2635 statement preceding T. It is used when T is a label or a case label.
2636 Labels should only start a new basic block if their previous statement
2637 wasn't a label. Otherwise, sequences of labels would generate
2638 unnecessary basic blocks that only contain a single label. */
2640 static inline bool
2641 stmt_starts_bb_p (tree t, tree prev_t)
2643 if (t == NULL_TREE)
2644 return false;
2646 /* LABEL_EXPRs start a new basic block only if the preceding
2647 statement wasn't a label of the same type. This prevents the
2648 creation of consecutive blocks that have nothing but a single
2649 label. */
2650 if (TREE_CODE (t) == LABEL_EXPR)
2652 /* Nonlocal and computed GOTO targets always start a new block. */
2653 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2654 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2655 return true;
2657 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2659 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2660 return true;
2662 cfg_stats.num_merged_labels++;
2663 return false;
2665 else
2666 return true;
2669 return false;
2673 /* Return true if T should end a basic block. */
2675 bool
2676 stmt_ends_bb_p (tree t)
2678 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2682 /* Add gotos that used to be represented implicitly in the CFG. */
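/* For example (a sketch): if a block ends with a fallthru edge whose
   destination is not the next block in the chain, the loop below emits
   an explicit "goto <label>;" at the end of the block, using
   tree_block_label to pick (or create) a label in the destination, and
   clears EDGE_FALLTHRU on the edge. */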
2684 void
2685 disband_implicit_edges (void)
2687 basic_block bb;
2688 block_stmt_iterator last;
2689 edge e;
2690 edge_iterator ei;
2691 tree stmt, label;
2693 FOR_EACH_BB (bb)
2695 last = bsi_last (bb);
2696 stmt = last_stmt (bb);
2698 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2700 /* Remove superfluous gotos from COND_EXPR branches. Moved
2701 from cfg_remove_useless_stmts here since it violates the
2702 invariants for tree--cfg correspondence and thus fits better
2703 here where we do it anyway. */
2704 e = find_edge (bb, bb->next_bb);
2705 if (e)
2707 if (e->flags & EDGE_TRUE_VALUE)
2708 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2709 else if (e->flags & EDGE_FALSE_VALUE)
2710 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2711 else
2712 gcc_unreachable ();
2713 e->flags |= EDGE_FALLTHRU;
2716 continue;
2719 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2721 /* Remove the RETURN_EXPR if we may fall through to the exit
2722 instead. */
2723 gcc_assert (single_succ_p (bb));
2724 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2726 if (bb->next_bb == EXIT_BLOCK_PTR
2727 && !TREE_OPERAND (stmt, 0))
2729 bsi_remove (&last);
2730 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2732 continue;
2735 /* There can be no fallthru edge if the last statement is a control
2736 one. */
2737 if (stmt && is_ctrl_stmt (stmt))
2738 continue;
2740 /* Find a fallthru edge and emit the goto if necessary. */
2741 FOR_EACH_EDGE (e, ei, bb->succs)
2742 if (e->flags & EDGE_FALLTHRU)
2743 break;
2745 if (!e || e->dest == bb->next_bb)
2746 continue;
2748 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2749 label = tree_block_label (e->dest);
2751 stmt = build1 (GOTO_EXPR, void_type_node, label);
2752 #ifdef USE_MAPPED_LOCATION
2753 SET_EXPR_LOCATION (stmt, e->goto_locus);
2754 #else
2755 SET_EXPR_LOCUS (stmt, e->goto_locus);
2756 #endif
2757 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2758 e->flags &= ~EDGE_FALLTHRU;
2762 /* Remove block annotations and other data structures. */
2764 void
2765 delete_tree_cfg_annotations (void)
2767 basic_block bb;
2768 if (n_basic_blocks > 0)
2769 free_blocks_annotations ();
2771 label_to_block_map = NULL;
2772 FOR_EACH_BB (bb)
2773 bb->rbi = NULL;
2777 /* Return the first statement in basic block BB. */
2779 tree
2780 first_stmt (basic_block bb)
2782 block_stmt_iterator i = bsi_start (bb);
2783 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2787 /* Return the last statement in basic block BB. */
2789 tree
2790 last_stmt (basic_block bb)
2792 block_stmt_iterator b = bsi_last (bb);
2793 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2797 /* Return a pointer to the last statement in block BB. */
2799 tree *
2800 last_stmt_ptr (basic_block bb)
2802 block_stmt_iterator last = bsi_last (bb);
2803 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2807 /* Return the last statement of an otherwise empty block. Return NULL
2808 if the block is totally empty, or if it contains more than one
2809 statement. */
2811 tree
2812 last_and_only_stmt (basic_block bb)
2814 block_stmt_iterator i = bsi_last (bb);
2815 tree last, prev;
2817 if (bsi_end_p (i))
2818 return NULL_TREE;
2820 last = bsi_stmt (i);
2821 bsi_prev (&i);
2822 if (bsi_end_p (i))
2823 return last;
2825 /* Empty statements should no longer appear in the instruction stream.
2826 Everything that might have appeared before should be deleted by
2827 remove_useless_stmts, and the optimizers should just bsi_remove
2828 instead of smashing with build_empty_stmt.
2830 Thus the only thing that should appear here in a block containing
2831 one executable statement is a label. */
2832 prev = bsi_stmt (i);
2833 if (TREE_CODE (prev) == LABEL_EXPR)
2834 return last;
2835 else
2836 return NULL_TREE;
2840 /* Mark BB as the basic block holding statement T. */
2842 void
2843 set_bb_for_stmt (tree t, basic_block bb)
2845 if (TREE_CODE (t) == PHI_NODE)
2846 PHI_BB (t) = bb;
2847 else if (TREE_CODE (t) == STATEMENT_LIST)
2849 tree_stmt_iterator i;
2850 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2851 set_bb_for_stmt (tsi_stmt (i), bb);
2853 else
2855 stmt_ann_t ann = get_stmt_ann (t);
2856 ann->bb = bb;
2858 /* If the statement is a label, add the label to the label-to-block
2859 map so that we can speed up edge creation for GOTO_EXPRs. */
2860 if (TREE_CODE (t) == LABEL_EXPR)
2862 int uid;
2864 t = LABEL_EXPR_LABEL (t);
2865 uid = LABEL_DECL_UID (t);
2866 if (uid == -1)
2868 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2869 if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
2870 VARRAY_GROW (label_to_block_map, 3 * uid / 2);
2872 else
2873 /* We're moving an existing label. Make sure that we've
2874 removed it from the old block. */
2875 gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
2876 VARRAY_BB (label_to_block_map, uid) = bb;
2881 /* Finds iterator for STMT. */
2883 extern block_stmt_iterator
2884 bsi_for_stmt (tree stmt)
2886 block_stmt_iterator bsi;
2888 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2889 if (bsi_stmt (bsi) == stmt)
2890 return bsi;
2892 gcc_unreachable ();
2895 /* Mark statement T as modified, and update it. */
2896 static inline void
2897 update_modified_stmts (tree t)
2899 if (TREE_CODE (t) == STATEMENT_LIST)
2901 tree_stmt_iterator i;
2902 tree stmt;
2903 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2905 stmt = tsi_stmt (i);
2906 update_stmt_if_modified (stmt);
2909 else
2910 update_stmt_if_modified (t);
2913 /* Insert statement (or statement list) T before the statement
2914 pointed-to by iterator I. M specifies how to update iterator I
2915 after insertion (see enum bsi_iterator_update). */
2917 void
2918 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2920 set_bb_for_stmt (t, i->bb);
2921 update_modified_stmts (t);
2922 tsi_link_before (&i->tsi, t, m);
2926 /* Insert statement (or statement list) T after the statement
2927 pointed-to by iterator I. M specifies how to update iterator I
2928 after insertion (see enum bsi_iterator_update). */
2930 void
2931 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2933 set_bb_for_stmt (t, i->bb);
2934 update_modified_stmts (t);
2935 tsi_link_after (&i->tsi, t, m);
2939 /* Remove the statement pointed to by iterator I. The iterator is updated
2940 to the next statement. */
2942 void
2943 bsi_remove (block_stmt_iterator *i)
2945 tree t = bsi_stmt (*i);
2946 set_bb_for_stmt (t, NULL);
2947 delink_stmt_imm_use (t);
2948 tsi_delink (&i->tsi);
2949 mark_stmt_modified (t);
2953 /* Move the statement at FROM so it comes right after the statement at TO. */
2955 void
2956 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2958 tree stmt = bsi_stmt (*from);
2959 bsi_remove (from);
2960 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2964 /* Move the statement at FROM so it comes right before the statement at TO. */
2966 void
2967 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2969 tree stmt = bsi_stmt (*from);
2970 bsi_remove (from);
2971 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2975 /* Move the statement at FROM to the end of basic block BB. */
2977 void
2978 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2980 block_stmt_iterator last = bsi_last (bb);
2982 /* Have to check bsi_end_p because it could be an empty block. */
2983 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2984 bsi_move_before (from, &last);
2985 else
2986 bsi_move_after (from, &last);
2990 /* Replace the contents of the statement pointed to by iterator BSI
2991 with STMT. If PRESERVE_EH_INFO is true, the exception handling
2992 information of the original statement is preserved. */
2994 void
2995 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
2997 int eh_region;
2998 tree orig_stmt = bsi_stmt (*bsi);
3000 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
3001 set_bb_for_stmt (stmt, bsi->bb);
3003 /* Preserve EH region information from the original statement, if
3004 requested by the caller. */
3005 if (preserve_eh_info)
3007 eh_region = lookup_stmt_eh_region (orig_stmt);
3008 if (eh_region >= 0)
3009 add_stmt_to_eh_region (stmt, eh_region);
3012 *bsi_stmt_ptr (*bsi) = stmt;
3013 mark_stmt_modified (stmt);
3014 update_modified_stmts (stmt);
3018 /* Insert the statement pointed-to by BSI into edge E. Every attempt
3019 is made to place the statement in an existing basic block, but
3020 sometimes that isn't possible. When it isn't possible, the edge is
3021 split and the statement is added to the new block.
3023 In all cases, the returned *BSI points to the correct location. The
3024 return value is true if insertion should be done after the location,
3025 or false if it should be done before the location. If a new basic block
3026 has to be created, it is stored in *NEW_BB. */
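/* A sketch of the calling convention (mirrored by the two callers
   below):

	if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
	  bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
	else
	  bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
*/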
3028 static bool
3029 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
3030 basic_block *new_bb)
3032 basic_block dest, src;
3033 tree tmp;
3035 dest = e->dest;
3036 restart:
3038 /* If the destination has one predecessor which has no PHI nodes,
3039 insert there. Except for the exit block.
3041 The requirement for no PHI nodes could be relaxed. Basically we
3042 would have to examine the PHIs to prove that none of them used
3043 the value set by the statement we want to insert on E. That
3044 hardly seems worth the effort. */
3045 if (single_pred_p (dest)
3046 && ! phi_nodes (dest)
3047 && dest != EXIT_BLOCK_PTR)
3049 *bsi = bsi_start (dest);
3050 if (bsi_end_p (*bsi))
3051 return true;
3053 /* Make sure we insert after any leading labels. */
3054 tmp = bsi_stmt (*bsi);
3055 while (TREE_CODE (tmp) == LABEL_EXPR)
3057 bsi_next (bsi);
3058 if (bsi_end_p (*bsi))
3059 break;
3060 tmp = bsi_stmt (*bsi);
3063 if (bsi_end_p (*bsi))
3065 *bsi = bsi_last (dest);
3066 return true;
3068 else
3069 return false;
3072 /* If the source has one successor, the edge is not abnormal and
3073 the last statement does not end a basic block, insert there.
3074 Except for the entry block. */
3075 src = e->src;
3076 if ((e->flags & EDGE_ABNORMAL) == 0
3077 && single_succ_p (src)
3078 && src != ENTRY_BLOCK_PTR)
3080 *bsi = bsi_last (src);
3081 if (bsi_end_p (*bsi))
3082 return true;
3084 tmp = bsi_stmt (*bsi);
3085 if (!stmt_ends_bb_p (tmp))
3086 return true;
3088 /* Insert code just before returning the value. We may need to decompose
3089 the return in case it contains a non-trivial operand. */
3090 if (TREE_CODE (tmp) == RETURN_EXPR)
3092 tree op = TREE_OPERAND (tmp, 0);
3093 if (!is_gimple_val (op))
3095 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3096 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3097 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3099 bsi_prev (bsi);
3100 return true;
3104 /* Otherwise, create a new basic block, and split this edge. */
3105 dest = split_edge (e);
3106 if (new_bb)
3107 *new_bb = dest;
3108 e = single_pred_edge (dest);
3109 goto restart;
3113 /* This routine will commit all pending edge insertions, creating any new
3114 basic blocks which are necessary. */
3116 void
3117 bsi_commit_edge_inserts (void)
3119 basic_block bb;
3120 edge e;
3121 edge_iterator ei;
3123 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3125 FOR_EACH_BB (bb)
3126 FOR_EACH_EDGE (e, ei, bb->succs)
3127 bsi_commit_one_edge_insert (e, NULL);
3131 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3132 to this block, otherwise set it to NULL. */
3134 void
3135 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3137 if (new_bb)
3138 *new_bb = NULL;
3139 if (PENDING_STMT (e))
3141 block_stmt_iterator bsi;
3142 tree stmt = PENDING_STMT (e);
3144 PENDING_STMT (e) = NULL_TREE;
3146 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3147 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3148 else
3149 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3154 /* Add STMT to the pending list of edge E. No actual insertion is
3155 made until a call to bsi_commit_edge_inserts () is made. */
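/* A sketch of typical usage (E and STMT are illustrative names): a pass
   queues statements on edges and materializes them later in one pass
   over the CFG:

	bsi_insert_on_edge (e, stmt);
	...
	bsi_commit_edge_inserts ();

   Until the commit, the queued statements simply sit on
   PENDING_STMT (E). */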
3157 void
3158 bsi_insert_on_edge (edge e, tree stmt)
3160 append_to_statement_list (stmt, &PENDING_STMT (e));
3163 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3164 block has to be created, it is returned. */
3166 basic_block
3167 bsi_insert_on_edge_immediate (edge e, tree stmt)
3169 block_stmt_iterator bsi;
3170 basic_block new_bb = NULL;
3172 gcc_assert (!PENDING_STMT (e));
3174 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3175 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3176 else
3177 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3179 return new_bb;
3182 /*---------------------------------------------------------------------------
3183 Tree specific functions for CFG manipulation
3184 ---------------------------------------------------------------------------*/
3186 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3188 static void
3189 reinstall_phi_args (edge new_edge, edge old_edge)
3191 tree var, phi;
3193 if (!PENDING_STMT (old_edge))
3194 return;
3196 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3197 var && phi;
3198 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3200 tree result = TREE_PURPOSE (var);
3201 tree arg = TREE_VALUE (var);
3203 gcc_assert (result == PHI_RESULT (phi));
3205 add_phi_arg (phi, arg, new_edge);
3208 PENDING_STMT (old_edge) = NULL;
3211 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3212 Abort on abnormal edges. */
3214 static basic_block
3215 tree_split_edge (edge edge_in)
3217 basic_block new_bb, after_bb, dest, src;
3218 edge new_edge, e;
3220 /* Abnormal edges cannot be split. */
3221 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3223 src = edge_in->src;
3224 dest = edge_in->dest;
3226 /* Place the new block in the block list. Try to keep the new block
3227 near its "logical" location. This is of most help to humans looking
3228 at debugging dumps. */
3229 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3230 after_bb = edge_in->src;
3231 else
3232 after_bb = dest->prev_bb;
3234 new_bb = create_empty_bb (after_bb);
3235 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3236 new_bb->count = edge_in->count;
3237 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3238 new_edge->probability = REG_BR_PROB_BASE;
3239 new_edge->count = edge_in->count;
3241 e = redirect_edge_and_branch (edge_in, new_bb);
3242 gcc_assert (e);
3243 reinstall_phi_args (new_edge, e);
3245 return new_bb;
3249 /* Return true when BB has label LABEL in it. */
3251 static bool
3252 has_label_p (basic_block bb, tree label)
3254 block_stmt_iterator bsi;
3256 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3258 tree stmt = bsi_stmt (bsi);
3260 if (TREE_CODE (stmt) != LABEL_EXPR)
3261 return false;
3262 if (LABEL_EXPR_LABEL (stmt) == label)
3263 return true;
3265 return false;
3269 /* Callback for walk_tree, check that all elements with address taken are
3270 properly noticed as such. DATA is nonnull if *TP was seen inside
3271 a PHI node. */
3273 static tree
3274 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3276 tree t = *tp, x;
3277 bool in_phi = (data != NULL);
3279 if (TYPE_P (t))
3280 *walk_subtrees = 0;
3282 /* Check operand N for being valid GIMPLE and give error MSG if not.
3283 We check for constants explicitly since they are not considered
3284 gimple invariants if they overflowed. */
3285 #define CHECK_OP(N, MSG) \
3286 do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N)) \
3287 && !is_gimple_val (TREE_OPERAND (t, N))) \
3288 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3290 switch (TREE_CODE (t))
3292 case SSA_NAME:
3293 if (SSA_NAME_IN_FREE_LIST (t))
3295 error ("SSA name in freelist but still referenced");
3296 return *tp;
3298 break;
3300 case ASSERT_EXPR:
3301 x = fold (ASSERT_EXPR_COND (t));
3302 if (x == boolean_false_node)
3304 error ("ASSERT_EXPR with an always-false condition");
3305 return *tp;
3307 break;
3309 case MODIFY_EXPR:
3310 x = TREE_OPERAND (t, 0);
3311 if (TREE_CODE (x) == BIT_FIELD_REF
3312 && is_gimple_reg (TREE_OPERAND (x, 0)))
3314 error ("GIMPLE register modified with BIT_FIELD_REF");
3315 return t;
3317 break;
3319 case ADDR_EXPR:
3320 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3321 dead PHIs that take the address of something. But if the PHI
3322 result is dead, the fact that it takes the address of anything
3323 is irrelevant. Because we can not tell from here if a PHI result
3324 is dead, we just skip this check for PHIs altogether. This means
3325 we may be missing "valid" checks, but what can you do?
3326 This was PR19217. */
3327 if (in_phi)
3328 break;
3330 /* Skip any references (they will be checked when we recurse down the
3331 tree) and ensure that any variable used as a prefix is marked
3332 addressable. */
3333 for (x = TREE_OPERAND (t, 0);
3334 handled_component_p (x);
3335 x = TREE_OPERAND (x, 0))
3338 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3339 return NULL;
3340 if (!TREE_ADDRESSABLE (x))
3342 error ("address taken, but ADDRESSABLE bit not set");
3343 return x;
3345 break;
3347 case COND_EXPR:
3348 x = COND_EXPR_COND (t);
3349 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3351 error ("non-boolean used in condition");
3352 return x;
3354 break;
3356 case NOP_EXPR:
3357 case CONVERT_EXPR:
3358 case FIX_TRUNC_EXPR:
3359 case FIX_CEIL_EXPR:
3360 case FIX_FLOOR_EXPR:
3361 case FIX_ROUND_EXPR:
3362 case FLOAT_EXPR:
3363 case NEGATE_EXPR:
3364 case ABS_EXPR:
3365 case BIT_NOT_EXPR:
3366 case NON_LVALUE_EXPR:
3367 case TRUTH_NOT_EXPR:
3368 CHECK_OP (0, "Invalid operand to unary operator");
3369 break;
3371 case REALPART_EXPR:
3372 case IMAGPART_EXPR:
3373 case COMPONENT_REF:
3374 case ARRAY_REF:
3375 case ARRAY_RANGE_REF:
3376 case BIT_FIELD_REF:
3377 case VIEW_CONVERT_EXPR:
3378 /* We have a nest of references. Verify that each of the operands
3379 that determine where to reference is either a constant or a variable,
3380 verify that the base is valid, and then show we've already checked
3381 the subtrees. */
3382 while (handled_component_p (t))
3384 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3385 CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
3386 else if (TREE_CODE (t) == ARRAY_REF
3387 || TREE_CODE (t) == ARRAY_RANGE_REF)
3389 CHECK_OP (1, "Invalid array index.");
3390 if (TREE_OPERAND (t, 2))
3391 CHECK_OP (2, "Invalid array lower bound.");
3392 if (TREE_OPERAND (t, 3))
3393 CHECK_OP (3, "Invalid array stride.");
3395 else if (TREE_CODE (t) == BIT_FIELD_REF)
3397 CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
3398 CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
3401 t = TREE_OPERAND (t, 0);
3404 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3406 error ("Invalid reference prefix.");
3407 return t;
3409 *walk_subtrees = 0;
3410 break;
3412 case LT_EXPR:
3413 case LE_EXPR:
3414 case GT_EXPR:
3415 case GE_EXPR:
3416 case EQ_EXPR:
3417 case NE_EXPR:
3418 case UNORDERED_EXPR:
3419 case ORDERED_EXPR:
3420 case UNLT_EXPR:
3421 case UNLE_EXPR:
3422 case UNGT_EXPR:
3423 case UNGE_EXPR:
3424 case UNEQ_EXPR:
3425 case LTGT_EXPR:
3426 case PLUS_EXPR:
3427 case MINUS_EXPR:
3428 case MULT_EXPR:
3429 case TRUNC_DIV_EXPR:
3430 case CEIL_DIV_EXPR:
3431 case FLOOR_DIV_EXPR:
3432 case ROUND_DIV_EXPR:
3433 case TRUNC_MOD_EXPR:
3434 case CEIL_MOD_EXPR:
3435 case FLOOR_MOD_EXPR:
3436 case ROUND_MOD_EXPR:
3437 case RDIV_EXPR:
3438 case EXACT_DIV_EXPR:
3439 case MIN_EXPR:
3440 case MAX_EXPR:
3441 case LSHIFT_EXPR:
3442 case RSHIFT_EXPR:
3443 case LROTATE_EXPR:
3444 case RROTATE_EXPR:
3445 case BIT_IOR_EXPR:
3446 case BIT_XOR_EXPR:
3447 case BIT_AND_EXPR:
3448 CHECK_OP (0, "Invalid operand to binary operator");
3449 CHECK_OP (1, "Invalid operand to binary operator");
3450 break;
3452 default:
3453 break;
3455 return NULL;
3457 #undef CHECK_OP
3461 /* Verify STMT, return true if STMT is not in GIMPLE form.
3462 TODO: Implement type checking. */
3464 static bool
3465 verify_stmt (tree stmt, bool last_in_block)
3467 tree addr;
3469 if (!is_gimple_stmt (stmt))
3471 error ("Is not a valid GIMPLE statement.");
3472 goto fail;
3475 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3476 if (addr)
3478 debug_generic_stmt (addr);
3479 return true;
3482 /* If the statement is marked as part of an EH region, then it is
3483 expected that the statement could throw. Verify that when an
3484 optimization simplifies a statement so that we can prove it
3485 cannot throw, we also update the other data structures to
3486 match. */
3487 if (lookup_stmt_eh_region (stmt) >= 0)
3489 if (!tree_could_throw_p (stmt))
3491 error ("Statement marked for throw, but doesn%'t.");
3492 goto fail;
3494 if (!last_in_block && tree_can_throw_internal (stmt))
3496 error ("Statement marked for throw in middle of block.");
3497 goto fail;
3501 return false;
3503 fail:
3504 debug_generic_stmt (stmt);
3505 return true;
3509 /* Return true when T can be shared. */
3511 static bool
3512 tree_node_can_be_shared (tree t)
3514 if (IS_TYPE_OR_DECL_P (t)
3515 /* We check for constants explicitly since they are not considered
3516 gimple invariants if they overflowed. */
3517 || CONSTANT_CLASS_P (t)
3518 || is_gimple_min_invariant (t)
3519 || TREE_CODE (t) == SSA_NAME
3520 || t == error_mark_node)
3521 return true;
3523 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3524 return true;
3526 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3527 /* We check for constants explicitly since they are not considered
3528 gimple invariants if they overflowed. */
3529 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3530 || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3531 || (TREE_CODE (t) == COMPONENT_REF
3532 || TREE_CODE (t) == REALPART_EXPR
3533 || TREE_CODE (t) == IMAGPART_EXPR))
3534 t = TREE_OPERAND (t, 0);
3536 if (DECL_P (t))
3537 return true;
3539 return false;
3543 /* Called via walk_tree. Verify tree sharing. */
3545 static tree
3546 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3548 htab_t htab = (htab_t) data;
3549 void **slot;
3551 if (tree_node_can_be_shared (*tp))
3553 *walk_subtrees = false;
3554 return NULL;
3557 slot = htab_find_slot (htab, *tp, INSERT);
3558 if (*slot)
3559 return *slot;
3560 *slot = *tp;
3562 return NULL;
3566 /* Verify the GIMPLE statement chain. */
3568 void
3569 verify_stmts (void)
3571 basic_block bb;
3572 block_stmt_iterator bsi;
3573 bool err = false;
3574 htab_t htab;
3575 tree addr;
3577 timevar_push (TV_TREE_STMT_VERIFY);
3578 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3580 FOR_EACH_BB (bb)
3582 tree phi;
3583 int i;
3585 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3587 int phi_num_args = PHI_NUM_ARGS (phi);
3589 if (bb_for_stmt (phi) != bb)
3591 error ("bb_for_stmt (phi) is set to a wrong basic block\n");
3592 err |= true;
3595 for (i = 0; i < phi_num_args; i++)
3597 tree t = PHI_ARG_DEF (phi, i);
3598 tree addr;
3600 /* Addressable variables do have SSA_NAMEs but they
3601 are not considered gimple values. */
3602 if (TREE_CODE (t) != SSA_NAME
3603 && TREE_CODE (t) != FUNCTION_DECL
3604 && !is_gimple_val (t))
3606 error ("PHI def is not a GIMPLE value");
3607 debug_generic_stmt (phi);
3608 debug_generic_stmt (t);
3609 err |= true;
3612 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3613 if (addr)
3615 debug_generic_stmt (addr);
3616 err |= true;
3619 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3620 if (addr)
3622 error ("Incorrect sharing of tree nodes");
3623 debug_generic_stmt (phi);
3624 debug_generic_stmt (addr);
3625 err |= true;
3630 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3632 tree stmt = bsi_stmt (bsi);
3634 if (bb_for_stmt (stmt) != bb)
3636 error ("bb_for_stmt (stmt) is set to a wrong basic block\n");
3637 err |= true;
3640 bsi_next (&bsi);
3641 err |= verify_stmt (stmt, bsi_end_p (bsi));
3642 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3643 if (addr)
3645 error ("Incorrect sharing of tree nodes");
3646 debug_generic_stmt (stmt);
3647 debug_generic_stmt (addr);
3648 err |= true;
3653 if (err)
3654 internal_error ("verify_stmts failed.");
3656 htab_delete (htab);
3657 timevar_pop (TV_TREE_STMT_VERIFY);
3661 /* Verifies that the flow information is OK. */
3663 static int
3664 tree_verify_flow_info (void)
3666 int err = 0;
3667 basic_block bb;
3668 block_stmt_iterator bsi;
3669 tree stmt;
3670 edge e;
3671 edge_iterator ei;
3673 if (ENTRY_BLOCK_PTR->stmt_list)
3675 error ("ENTRY_BLOCK has a statement list associated with it\n");
3676 err = 1;
3679 if (EXIT_BLOCK_PTR->stmt_list)
3681 error ("EXIT_BLOCK has a statement list associated with it\n");
3682 err = 1;
3685 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3686 if (e->flags & EDGE_FALLTHRU)
3688 error ("Fallthru to exit from bb %d\n", e->src->index);
3689 err = 1;
3692 FOR_EACH_BB (bb)
3694 bool found_ctrl_stmt = false;
3696 stmt = NULL_TREE;
3698 /* Skip labels on the start of basic block. */
3699 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3701 tree prev_stmt = stmt;
3703 stmt = bsi_stmt (bsi);
3705 if (TREE_CODE (stmt) != LABEL_EXPR)
3706 break;
3708 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3710 error ("Nonlocal label %s is not first "
3711 "in a sequence of labels in bb %d",
3712 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3713 bb->index);
3714 err = 1;
3717 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3719 error ("Label %s to block does not match in bb %d\n",
3720 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3721 bb->index);
3722 err = 1;
3725 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3726 != current_function_decl)
3728 error ("Label %s has incorrect context in bb %d\n",
3729 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3730 bb->index);
3731 err = 1;
3735 /* Verify that body of basic block BB is free of control flow. */
3736 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3738 tree stmt = bsi_stmt (bsi);
3740 if (found_ctrl_stmt)
3742 error ("Control flow in the middle of basic block %d\n",
3743 bb->index);
3744 err = 1;
3747 if (stmt_ends_bb_p (stmt))
3748 found_ctrl_stmt = true;
3750 if (TREE_CODE (stmt) == LABEL_EXPR)
3752 error ("Label %s in the middle of basic block %d\n",
3753 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3754 bb->index);
3755 err = 1;
3758 bsi = bsi_last (bb);
3759 if (bsi_end_p (bsi))
3760 continue;
3762 stmt = bsi_stmt (bsi);
3764 if (is_ctrl_stmt (stmt))
3766 FOR_EACH_EDGE (e, ei, bb->succs)
3767 if (e->flags & EDGE_FALLTHRU)
3769 error ("Fallthru edge after a control statement in bb %d \n",
3770 bb->index);
3771 err = 1;
3775 switch (TREE_CODE (stmt))
3777 case COND_EXPR:
3779 edge true_edge;
3780 edge false_edge;
3781 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3782 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3784 error ("Structured COND_EXPR at the end of bb %d\n", bb->index);
3785 err = 1;
3788 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3790 if (!true_edge || !false_edge
3791 || !(true_edge->flags & EDGE_TRUE_VALUE)
3792 || !(false_edge->flags & EDGE_FALSE_VALUE)
3793 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3794 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3795 || EDGE_COUNT (bb->succs) >= 3)
3797 error ("Wrong outgoing edge flags at end of bb %d\n",
3798 bb->index);
3799 err = 1;
3802 if (!has_label_p (true_edge->dest,
3803 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3805 error ("%<then%> label does not match edge at end of bb %d\n",
3806 bb->index);
3807 err = 1;
3810 if (!has_label_p (false_edge->dest,
3811 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3813 error ("%<else%> label does not match edge at end of bb %d\n",
3814 bb->index);
3815 err = 1;
3818 break;
3820 case GOTO_EXPR:
3821 if (simple_goto_p (stmt))
3823 error ("Explicit goto at end of bb %d\n", bb->index);
3824 err = 1;
3826 else
3828 /* FIXME. We should double check that the labels in the
3829 destination blocks have their address taken. */
3830 FOR_EACH_EDGE (e, ei, bb->succs)
3831 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3832 | EDGE_FALSE_VALUE))
3833 || !(e->flags & EDGE_ABNORMAL))
3835 error ("Wrong outgoing edge flags at end of bb %d\n",
3836 bb->index);
3837 err = 1;
3840 break;
3842 case RETURN_EXPR:
3843 if (!single_succ_p (bb)
3844 || (single_succ_edge (bb)->flags
3845 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3846 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3848 error ("Wrong outgoing edge flags at end of bb %d\n", bb->index);
3849 err = 1;
3851 if (single_succ (bb) != EXIT_BLOCK_PTR)
3853 error ("Return edge does not point to exit in bb %d\n",
3854 bb->index);
3855 err = 1;
3857 break;
3859 case SWITCH_EXPR:
3861 tree prev;
3862 edge e;
3863 size_t i, n;
3864 tree vec;
3866 vec = SWITCH_LABELS (stmt);
3867 n = TREE_VEC_LENGTH (vec);
3869 /* Mark all the destination basic blocks. */
3870 for (i = 0; i < n; ++i)
3872 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3873 basic_block label_bb = label_to_block (lab);
3875 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3876 label_bb->aux = (void *)1;
3879 /* Verify that the case labels are sorted. */
3880 prev = TREE_VEC_ELT (vec, 0);
3881 for (i = 1; i < n - 1; ++i)
3883 tree c = TREE_VEC_ELT (vec, i);
3884 if (! CASE_LOW (c))
3886 error ("Found default case not at end of case vector");
3887 err = 1;
3888 continue;
3890 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3892 error ("Case labels not sorted:\n ");
3893 print_generic_expr (stderr, prev, 0);
3894 fprintf (stderr," is greater than ");
3895 print_generic_expr (stderr, c, 0);
3896 fprintf (stderr," but comes before it.\n");
3897 err = 1;
3899 prev = c;
3901 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3903 error ("No default case found at end of case vector");
3904 err = 1;
3907 FOR_EACH_EDGE (e, ei, bb->succs)
3909 if (!e->dest->aux)
3911 error ("Extra outgoing edge %d->%d\n",
3912 bb->index, e->dest->index);
3913 err = 1;
3915 e->dest->aux = (void *)2;
3916 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3917 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3919 error ("Wrong outgoing edge flags at end of bb %d\n",
3920 bb->index);
3921 err = 1;
3925 /* Check that we have all of them. */
3926 for (i = 0; i < n; ++i)
3928 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3929 basic_block label_bb = label_to_block (lab);
3931 if (label_bb->aux != (void *)2)
3933 error ("Missing edge %i->%i",
3934 bb->index, label_bb->index);
3935 err = 1;
3939 FOR_EACH_EDGE (e, ei, bb->succs)
3940 e->dest->aux = (void *)0;
3943 default: ;
3947 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3948 verify_dominators (CDI_DOMINATORS);
3950 return err;
3954 /* Updates phi nodes after creating a forwarder block joined
3955 by edge FALLTHRU. */
3957 static void
3958 tree_make_forwarder_block (edge fallthru)
3960 edge e;
3961 edge_iterator ei;
3962 basic_block dummy, bb;
3963 tree phi, new_phi, var;
3965 dummy = fallthru->src;
3966 bb = fallthru->dest;
3968 if (single_pred_p (bb))
3969 return;
3971 /* If we redirected a branch we must create new phi nodes at the
3972 start of BB. */
3973 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3975 var = PHI_RESULT (phi);
3976 new_phi = create_phi_node (var, bb);
3977 SSA_NAME_DEF_STMT (var) = new_phi;
3978 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3979 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3982 /* Ensure that the PHI node chain is in the same order. */
3983 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3985 /* Add the arguments we have stored on edges. */
3986 FOR_EACH_EDGE (e, ei, bb->preds)
3988 if (e == fallthru)
3989 continue;
3991 flush_pending_stmts (e);
3996 /* Return true if basic block BB does nothing except pass control
3997 flow to another block, and we can safely insert a label at
3998 the start of the successor block.
4000 As a precondition, we require that BB be not equal to
4001 ENTRY_BLOCK_PTR. */
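/* For example (a sketch): a block whose body is nothing but the label
   <L3>:; and whose single, non-abnormal outgoing edge leads to some
   other block is a forwarder. PHI_WANTED selects whether blocks with
   PHI nodes (as used by the mergephi pass below) or blocks without
   them qualify. */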
4003 static bool
4004 tree_forwarder_block_p (basic_block bb, bool phi_wanted)
4006 block_stmt_iterator bsi;
4008 /* BB must have a single outgoing edge. */
4009 if (single_succ_p (bb) != 1
4010 /* If PHI_WANTED is false, BB must not have any PHI nodes.
4011 Otherwise, BB must have PHI nodes. */
4012 || (phi_nodes (bb) != NULL_TREE) != phi_wanted
4013 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
4014 || single_succ (bb) == EXIT_BLOCK_PTR
4015 /* Nor should this be an infinite loop. */
4016 || single_succ (bb) == bb
4017 /* BB may not have an abnormal outgoing edge. */
4018 || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
4019 return false;
4021 #if ENABLE_CHECKING
4022 gcc_assert (bb != ENTRY_BLOCK_PTR);
4023 #endif
4025 /* Now walk through the statements backward. We can ignore labels,
4026 anything else means this is not a forwarder block. */
4027 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
4029 tree stmt = bsi_stmt (bsi);
4031 switch (TREE_CODE (stmt))
4033 case LABEL_EXPR:
4034 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
4035 return false;
4036 break;
4038 default:
4039 return false;
4043 if (find_edge (ENTRY_BLOCK_PTR, bb))
4044 return false;
4046 if (current_loops)
4048 basic_block dest;
4049 /* Protect loop latches, headers and preheaders. */
4050 if (bb->loop_father->header == bb)
4051 return false;
4052 dest = EDGE_SUCC (bb, 0)->dest;
4054 if (dest->loop_father->header == dest)
4055 return false;
4058 return true;
4061 /* Return true if BB has at least one abnormal incoming edge. */
4063 static inline bool
4064 has_abnormal_incoming_edge_p (basic_block bb)
4066 edge e;
4067 edge_iterator ei;
4069 FOR_EACH_EDGE (e, ei, bb->preds)
4070 if (e->flags & EDGE_ABNORMAL)
4071 return true;
4073 return false;
4076 /* Removes forwarder block BB. Returns false if this failed. If a new
4077 forwarder block is created due to redirection of edges, it is
4078 stored to worklist. */
4080 static bool
4081 remove_forwarder_block (basic_block bb, basic_block **worklist)
4083 edge succ = single_succ_edge (bb), e, s;
4084 basic_block dest = succ->dest;
4085 tree label;
4086 tree phi;
4087 edge_iterator ei;
4088 block_stmt_iterator bsi, bsi_to;
4089 bool seen_abnormal_edge = false;
4091 /* We check for infinite loops already in tree_forwarder_block_p.
4092 However it may happen that the infinite loop is created
4093 afterwards due to removal of forwarders. */
4094 if (dest == bb)
4095 return false;
4097 /* If the destination block consists of a nonlocal label, do not merge
4098 it. */
4099 label = first_stmt (dest);
4100 if (label
4101 && TREE_CODE (label) == LABEL_EXPR
4102 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
4103 return false;
4105 /* If there is an abnormal edge to basic block BB, but not into
4106 dest, problems might occur at out-of-SSA time when the phi node is
4107 removed, due to overlapping live ranges of registers.
4109 If there is an abnormal edge in DEST, the problems would occur
4110 anyway since cleanup_dead_labels would then merge the labels for
4111 two different eh regions, and rest of exception handling code
4112 does not like it.
4114 So if there is an abnormal edge to BB, proceed only if there is
4115 no abnormal edge to DEST and there are no phi nodes in DEST. */
4116 if (has_abnormal_incoming_edge_p (bb))
4118 seen_abnormal_edge = true;
4120 if (has_abnormal_incoming_edge_p (dest)
4121 || phi_nodes (dest) != NULL_TREE)
4122 return false;
4125 /* If there are phi nodes in DEST, and some of the blocks that are
4126 predecessors of BB are also predecessors of DEST, check that the
4127 phi node arguments match. */
4128 if (phi_nodes (dest))
4130 FOR_EACH_EDGE (e, ei, bb->preds)
4132 s = find_edge (e->src, dest);
4133 if (!s)
4134 continue;
4136 if (!phi_alternatives_equal (dest, succ, s))
4137 return false;
4141 /* Redirect the edges. */
4142 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4144 if (e->flags & EDGE_ABNORMAL)
4146 /* If there is an abnormal edge, redirect it anyway, and
4147 move the labels to the new block to make it legal. */
4148 s = redirect_edge_succ_nodup (e, dest);
4150 else
4151 s = redirect_edge_and_branch (e, dest);
4153 if (s == e)
4155 /* Create arguments for the phi nodes, since the edge was not
4156 here before. */
4157 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4158 add_phi_arg (phi, PHI_ARG_DEF (phi, succ->dest_idx), s);
4160 else
4162 /* The source basic block might become a forwarder. We know
4163 that it was not a forwarder before, since it used to have
4164 at least two outgoing edges, so we may just add it to
4165 worklist. */
4166 if (tree_forwarder_block_p (s->src, false))
4167 *(*worklist)++ = s->src;
4171 if (seen_abnormal_edge)
4173 /* Move the labels to the new block, so that the redirection of
4174 the abnormal edges works. */
4176 bsi_to = bsi_start (dest);
4177 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
4179 label = bsi_stmt (bsi);
4180 gcc_assert (TREE_CODE (label) == LABEL_EXPR);
4181 bsi_remove (&bsi);
4182 bsi_insert_before (&bsi_to, label, BSI_CONTINUE_LINKING);
4186 /* Update the dominators. */
4187 if (dom_info_available_p (CDI_DOMINATORS))
4189 basic_block dom, dombb, domdest;
4191 dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
4192 domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
4193 if (domdest == bb)
4195 /* Shortcut to avoid calling (relatively expensive)
4196 nearest_common_dominator unless necessary. */
4197 dom = dombb;
4199 else
4200 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
4202 set_immediate_dominator (CDI_DOMINATORS, dest, dom);
4205 /* And kill the forwarder block. */
4206 delete_basic_block (bb);
4208 return true;
4211 /* Removes forwarder blocks. */
4213 static bool
4214 cleanup_forwarder_blocks (void)
4216 basic_block bb;
4217 bool changed = false;
4218 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
4219 basic_block *current = worklist;
4221 FOR_EACH_BB (bb)
4223 if (tree_forwarder_block_p (bb, false))
4224 *current++ = bb;
4227 while (current != worklist)
4229 bb = *--current;
4230 changed |= remove_forwarder_block (bb, &current);
4233 free (worklist);
4234 return changed;
4237 /* Merge the PHI nodes at BB into those at BB's sole successor. */
4239 static void
4240 remove_forwarder_block_with_phi (basic_block bb)
4242 edge succ = single_succ_edge (bb);
4243 basic_block dest = succ->dest;
4244 tree label;
4245 basic_block dombb, domdest, dom;
4247 /* We check for infinite loops already in tree_forwarder_block_p.
4248 However it may happen that the infinite loop is created
4249 afterwards due to removal of forwarders. */
4250 if (dest == bb)
4251 return;
4253 /* If the destination block consists of a nonlocal label, do not
4254 merge it. */
4255 label = first_stmt (dest);
4256 if (label
4257 && TREE_CODE (label) == LABEL_EXPR
4258 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
4259 return;
4261 /* Redirect each incoming edge to BB to DEST. */
4262 while (EDGE_COUNT (bb->preds) > 0)
4264 edge e = EDGE_PRED (bb, 0), s;
4265 tree phi;
4267 s = find_edge (e->src, dest);
4268 if (s)
4270 /* We already have an edge S from E->src to DEST. If S and
4271 E->dest's sole successor edge have the same PHI arguments
4272 at DEST, redirect S to DEST. */
4273 if (phi_alternatives_equal (dest, s, succ))
4275 e = redirect_edge_and_branch (e, dest);
4276 PENDING_STMT (e) = NULL_TREE;
4277 continue;
4280 /* PHI arguments are different. Create a forwarder block by
4281 splitting E so that we can merge PHI arguments on E to
4282 DEST. */
4283 e = single_succ_edge (split_edge (e));
4286 s = redirect_edge_and_branch (e, dest);
4288 /* redirect_edge_and_branch must not create a new edge. */
4289 gcc_assert (s == e);
4291 /* Add to the PHI nodes at DEST each PHI argument removed at the
4292 destination of E. */
4293 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4295 tree def = PHI_ARG_DEF (phi, succ->dest_idx);
4297 if (TREE_CODE (def) == SSA_NAME)
4299 tree var;
4301 /* If DEF is one of the results of PHI nodes removed during
4302 redirection, replace it with the PHI argument that used
4303 to be on E. */
4304 for (var = PENDING_STMT (e); var; var = TREE_CHAIN (var))
4306 tree old_arg = TREE_PURPOSE (var);
4307 tree new_arg = TREE_VALUE (var);
4309 if (def == old_arg)
4311 def = new_arg;
4312 break;
4317 add_phi_arg (phi, def, s);
4320 PENDING_STMT (e) = NULL;
4323 /* Update the dominators. */
4324 dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
4325 domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
4326 if (domdest == bb)
4328 /* Shortcut to avoid calling (relatively expensive)
4329 nearest_common_dominator unless necessary. */
4330 dom = dombb;
4332 else
4333 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
4335 set_immediate_dominator (CDI_DOMINATORS, dest, dom);
4337 /* Remove BB since all of BB's incoming edges have been redirected
4338 to DEST. */
4339 delete_basic_block (bb);
4342 /* This pass merges PHI nodes if one feeds into another. For example,
4343 suppose we have the following:
4345 goto <bb 9> (<L9>);
4347 <L8>:;
4348 tem_17 = foo ();
4350 # tem_6 = PHI <tem_17(8), tem_23(7)>;
4351 <L9>:;
4353 # tem_3 = PHI <tem_6(9), tem_2(5)>;
4354 <L10>:;
4356 Then we merge the first PHI node into the second one like so:
4358 goto <bb 9> (<L10>);
4360 <L8>:;
4361 tem_17 = foo ();
4363 # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
4364 <L10>:;
4367 static void
4368 merge_phi_nodes (void)
4370 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
4371 basic_block *current = worklist;
4372 basic_block bb;
4374 calculate_dominance_info (CDI_DOMINATORS);
4376 /* Find all PHI nodes that we may be able to merge. */
4377 FOR_EACH_BB (bb)
4379 basic_block dest;
4381 /* Look for a forwarder block with PHI nodes. */
4382 if (!tree_forwarder_block_p (bb, true))
4383 continue;
4385 dest = single_succ (bb);
4387 /* We have to feed into another basic block with PHI
4388 nodes. */
4389 if (!phi_nodes (dest)
4390 /* We don't want to deal with a basic block with
4391 abnormal edges. */
4392 || has_abnormal_incoming_edge_p (bb))
4393 continue;
4395 if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
4397 /* If BB does not dominate DEST, then the PHI nodes at
4398 DEST must be the only users of the results of the PHI
4399 nodes at BB. */
4400 *current++ = bb;
4404 /* Now let's drain WORKLIST. */
4405 while (current != worklist)
4407 bb = *--current;
4408 remove_forwarder_block_with_phi (bb);
4411 free (worklist);
4414 static bool
4415 gate_merge_phi (void)
4417 return 1;
4420 struct tree_opt_pass pass_merge_phi = {
4421 "mergephi", /* name */
4422 gate_merge_phi, /* gate */
4423 merge_phi_nodes, /* execute */
4424 NULL, /* sub */
4425 NULL, /* next */
4426 0, /* static_pass_number */
4427 TV_TREE_MERGE_PHI, /* tv_id */
4428 PROP_cfg | PROP_ssa, /* properties_required */
4429 0, /* properties_provided */
4430 0, /* properties_destroyed */
4431 0, /* todo_flags_start */
4432 TODO_dump_func | TODO_ggc_collect
4433 | TODO_verify_ssa, /* todo_flags_finish */
4434 0 /* letter */
4437 /* Return a non-special label in the head of basic block BB.
4438 Create one if it doesn't exist. */
4440 tree
4441 tree_block_label (basic_block bb)
4443 block_stmt_iterator i, s = bsi_start (bb);
4444 bool first = true;
4445 tree label, stmt;
4447 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4449 stmt = bsi_stmt (i);
4450 if (TREE_CODE (stmt) != LABEL_EXPR)
4451 break;
4452 label = LABEL_EXPR_LABEL (stmt);
4453 if (!DECL_NONLOCAL (label))
4455 if (!first)
4456 bsi_move_before (&i, &s);
4457 return label;
4461 label = create_artificial_label ();
4462 stmt = build1 (LABEL_EXPR, void_type_node, label);
4463 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4464 return label;
4468 /* Attempt to perform edge redirection by replacing a possibly complex
4469 jump instruction by a goto or by removing the jump completely.
4470 This can apply only if all edges now point to the same block. The
4471 parameters and return values are equivalent to
4472 redirect_edge_and_branch. */
4474 static edge
4475 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4477 basic_block src = e->src;
4478 block_stmt_iterator b;
4479 tree stmt;
4481 /* We can replace or remove a complex jump only when we have exactly
4482 two edges. */
4483 if (EDGE_COUNT (src->succs) != 2
4484 /* Verify that all targets will be TARGET. Specifically, the
4485 edge that is not E must also go to TARGET. */
4486 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4487 return NULL;
4489 b = bsi_last (src);
4490 if (bsi_end_p (b))
4491 return NULL;
4492 stmt = bsi_stmt (b);
4494 if (TREE_CODE (stmt) == COND_EXPR
4495 || TREE_CODE (stmt) == SWITCH_EXPR)
4497 bsi_remove (&b);
4498 e = ssa_redirect_edge (e, target);
4499 e->flags = EDGE_FALLTHRU;
4500 return e;
4503 return NULL;
4507 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4508 edge representing the redirected branch. */
4510 static edge
4511 tree_redirect_edge_and_branch (edge e, basic_block dest)
4513 basic_block bb = e->src;
4514 block_stmt_iterator bsi;
4515 edge ret;
4516 tree label, stmt;
4518 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4519 return NULL;
4521 if (e->src != ENTRY_BLOCK_PTR
4522 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4523 return ret;
4525 if (e->dest == dest)
4526 return NULL;
4528 label = tree_block_label (dest);
4530 bsi = bsi_last (bb);
4531 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4533 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4535 case COND_EXPR:
4536 stmt = (e->flags & EDGE_TRUE_VALUE
4537 ? COND_EXPR_THEN (stmt)
4538 : COND_EXPR_ELSE (stmt));
4539 GOTO_DESTINATION (stmt) = label;
4540 break;
4542 case GOTO_EXPR:
4543 /* No non-abnormal edges should lead from a non-simple goto, and
4544 simple ones should be represented implicitly. */
4545 gcc_unreachable ();
4547 case SWITCH_EXPR:
4549 tree cases = get_cases_for_edge (e, stmt);
4551 /* If we have a list of cases associated with E, then use it
4552 as it's a lot faster than walking the entire case vector. */
4553 if (cases)
4555 edge e2 = find_edge (e->src, dest);
4556 tree last, first;
4558 first = cases;
4559 while (cases)
4561 last = cases;
4562 CASE_LABEL (cases) = label;
4563 cases = TREE_CHAIN (cases);
4566 /* If there was already an edge in the CFG, then we need
4567 to move all the cases associated with E to E2. */
4568 if (e2)
4570 tree cases2 = get_cases_for_edge (e2, stmt);
4572 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4573 TREE_CHAIN (cases2) = first;
4576 else
4578 tree vec = SWITCH_LABELS (stmt);
4579 size_t i, n = TREE_VEC_LENGTH (vec);
4581 for (i = 0; i < n; i++)
4583 tree elt = TREE_VEC_ELT (vec, i);
4585 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4586 CASE_LABEL (elt) = label;
4590 break;
4593 case RETURN_EXPR:
4594 bsi_remove (&bsi);
4595 e->flags |= EDGE_FALLTHRU;
4596 break;
4598 default:
4599 /* Otherwise it must be a fallthru edge, and we don't need to
4600 do anything besides redirecting it. */
4601 gcc_assert (e->flags & EDGE_FALLTHRU);
4602 break;
4605 /* Update/insert PHI nodes as necessary. */
4607 /* Now update the edges in the CFG. */
4608 e = ssa_redirect_edge (e, dest);
4610 return e;
4614 /* Simple wrapper, as we can always redirect fallthru edges. */
4616 static basic_block
4617 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4619 e = tree_redirect_edge_and_branch (e, dest);
4620 gcc_assert (e);
4622 return NULL;
4626 /* Splits basic block BB after statement STMT (but at least after the
4627 labels). If STMT is NULL, BB is split just after the labels. */
4629 static basic_block
4630 tree_split_block (basic_block bb, void *stmt)
4632 block_stmt_iterator bsi, bsi_tgt;
4633 tree act;
4634 basic_block new_bb;
4635 edge e;
4636 edge_iterator ei;
4638 new_bb = create_empty_bb (bb);
4640 /* Redirect the outgoing edges. */
4641 new_bb->succs = bb->succs;
4642 bb->succs = NULL;
4643 FOR_EACH_EDGE (e, ei, new_bb->succs)
4644 e->src = new_bb;
4646 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4647 stmt = NULL;
4649 /* Move everything from BSI to the new basic block. */
4650 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4652 act = bsi_stmt (bsi);
4653 if (TREE_CODE (act) == LABEL_EXPR)
4654 continue;
4656 if (!stmt)
4657 break;
4659 if (stmt == act)
4661 bsi_next (&bsi);
4662 break;
4666 bsi_tgt = bsi_start (new_bb);
4667 while (!bsi_end_p (bsi))
4669 act = bsi_stmt (bsi);
4670 bsi_remove (&bsi);
4671 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4674 return new_bb;
4678 /* Moves basic block BB after block AFTER. */
4680 static bool
4681 tree_move_block_after (basic_block bb, basic_block after)
4683 if (bb->prev_bb == after)
4684 return true;
4686 unlink_block (bb);
4687 link_block (bb, after);
4689 return true;
4693 /* Return true if basic_block can be duplicated. */
4695 static bool
4696 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4698 return true;
4701 /* Create a duplicate of the basic block BB. NOTE: This does not
4702 preserve SSA form. */
4704 static basic_block
4705 tree_duplicate_bb (basic_block bb)
4707 basic_block new_bb;
4708 block_stmt_iterator bsi, bsi_tgt;
4709 tree phi, val;
4710 ssa_op_iter op_iter;
4712 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4714 /* First copy the phi nodes. We do not copy phi node arguments here,
4715 since the edges are not ready yet. Keep the chain of phi nodes in
4716 the same order, so that we can add them later. */
4717 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4719 mark_for_rewrite (PHI_RESULT (phi));
4720 create_phi_node (PHI_RESULT (phi), new_bb);
4722 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4724 bsi_tgt = bsi_start (new_bb);
4725 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4727 tree stmt = bsi_stmt (bsi);
4728 tree copy;
4730 if (TREE_CODE (stmt) == LABEL_EXPR)
4731 continue;
4733 FOR_EACH_SSA_TREE_OPERAND (val, stmt, op_iter, SSA_OP_ALL_DEFS)
4734 mark_for_rewrite (val);
4736 copy = unshare_expr (stmt);
4738 /* Copy also the virtual operands. */
4739 get_stmt_ann (copy);
4740 copy_virtual_operands (copy, stmt);
4742 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4745 return new_bb;
4748 /* Basic block BB_COPY was created by code duplication. Add phi node
4749 arguments for edges going out of BB_COPY. The blocks that were
4750 duplicated have rbi->duplicated set to one. */
4752 void
4753 add_phi_args_after_copy_bb (basic_block bb_copy)
4755 basic_block bb, dest;
4756 edge e, e_copy;
4757 edge_iterator ei;
4758 tree phi, phi_copy, phi_next, def;
4760 bb = bb_copy->rbi->original;
4762 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4764 if (!phi_nodes (e_copy->dest))
4765 continue;
4767 if (e_copy->dest->rbi->duplicated)
4768 dest = e_copy->dest->rbi->original;
4769 else
4770 dest = e_copy->dest;
4772 e = find_edge (bb, dest);
4773 if (!e)
4775 /* During loop unrolling the target of the latch edge is copied.
4776 In this case we are not looking for the edge to DEST, but for
4777 the edge to the duplicated block whose original was DEST. */
4778 FOR_EACH_EDGE (e, ei, bb->succs)
4779 if (e->dest->rbi->duplicated
4780 && e->dest->rbi->original == dest)
4781 break;
4783 gcc_assert (e != NULL);
4786 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4787 phi;
4788 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4790 phi_next = PHI_CHAIN (phi);
4792 gcc_assert (PHI_RESULT (phi) == PHI_RESULT (phi_copy));
4793 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4794 add_phi_arg (phi_copy, def, e_copy);
4799 /* Blocks in REGION_COPY array of length N_REGION were created by
4800 duplication of basic blocks. Add phi node arguments for edges
4801 going from these blocks. */
4803 void
4804 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4806 unsigned i;
4808 for (i = 0; i < n_region; i++)
4809 region_copy[i]->rbi->duplicated = 1;
4811 for (i = 0; i < n_region; i++)
4812 add_phi_args_after_copy_bb (region_copy[i]);
4814 for (i = 0; i < n_region; i++)
4815 region_copy[i]->rbi->duplicated = 0;
4818 /* Maps the old ssa name FROM_NAME to TO_NAME. */
4820 struct ssa_name_map_entry
4822 tree from_name;
4823 tree to_name;
4826 /* Hash function for ssa_name_map_entry. */
4828 static hashval_t
4829 ssa_name_map_entry_hash (const void *entry)
4831 const struct ssa_name_map_entry *en = entry;
4832 return SSA_NAME_VERSION (en->from_name);
4835 /* Equality function for ssa_name_map_entry. */
4837 static int
4838 ssa_name_map_entry_eq (const void *in_table, const void *ssa_name)
4840 const struct ssa_name_map_entry *en = in_table;
4842 return en->from_name == ssa_name;
4845 /* Allocate duplicates of the ssa names whose versions are set in the bitmap
4846 DEFINITIONS and store the mapping in MAP. */
4848 void
4849 allocate_ssa_names (bitmap definitions, htab_t *map)
4851 tree name;
4852 struct ssa_name_map_entry *entry;
4853 PTR *slot;
4854 unsigned ver;
4855 bitmap_iterator bi;
4857 if (!*map)
4858 *map = htab_create (10, ssa_name_map_entry_hash,
4859 ssa_name_map_entry_eq, free);
4860 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
4862 name = ssa_name (ver);
4863 slot = htab_find_slot_with_hash (*map, name, SSA_NAME_VERSION (name),
4864 INSERT);
4865 if (*slot)
4866 entry = *slot;
4867 else
4869 entry = xmalloc (sizeof (struct ssa_name_map_entry));
4870 entry->from_name = name;
4871 *slot = entry;
4873 entry->to_name = duplicate_ssa_name (name, SSA_NAME_DEF_STMT (name));
4877 /* Rewrite the definition DEF in statement STMT to a new ssa name as specified
4878 by the mapping MAP. */
4880 static void
4881 rewrite_to_new_ssa_names_def (def_operand_p def, tree stmt, htab_t map)
4883 tree name = DEF_FROM_PTR (def);
4884 struct ssa_name_map_entry *entry;
4886 gcc_assert (TREE_CODE (name) == SSA_NAME);
4888 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4889 if (!entry)
4890 return;
4892 SET_DEF (def, entry->to_name);
4893 SSA_NAME_DEF_STMT (entry->to_name) = stmt;
4896 /* Rewrite the use USE to a new ssa name as specified by the mapping MAP. */
4898 static void
4899 rewrite_to_new_ssa_names_use (use_operand_p use, htab_t map)
4901 tree name = USE_FROM_PTR (use);
4902 struct ssa_name_map_entry *entry;
4904 if (TREE_CODE (name) != SSA_NAME)
4905 return;
4907 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4908 if (!entry)
4909 return;
4911 SET_USE (use, entry->to_name);
4914 /* Rewrite the ssa names in basic block BB to new ones as specified by the
4915 mapping MAP. */
4917 void
4918 rewrite_to_new_ssa_names_bb (basic_block bb, htab_t map)
4920 unsigned i;
4921 edge e;
4922 edge_iterator ei;
4923 tree phi, stmt;
4924 block_stmt_iterator bsi;
4925 use_optype uses;
4926 vuse_optype vuses;
4927 def_optype defs;
4928 v_may_def_optype v_may_defs;
4929 v_must_def_optype v_must_defs;
4930 stmt_ann_t ann;
4932 FOR_EACH_EDGE (e, ei, bb->preds)
4933 if (e->flags & EDGE_ABNORMAL)
4934 break;
4936 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4938 rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi), phi, map);
4939 if (e)
4940 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
4943 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4945 stmt = bsi_stmt (bsi);
4946 ann = stmt_ann (stmt);
4948 uses = USE_OPS (ann);
4949 for (i = 0; i < NUM_USES (uses); i++)
4950 rewrite_to_new_ssa_names_use (USE_OP_PTR (uses, i), map);
4952 defs = DEF_OPS (ann);
4953 for (i = 0; i < NUM_DEFS (defs); i++)
4954 rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs, i), stmt, map);
4956 vuses = VUSE_OPS (ann);
4957 for (i = 0; i < NUM_VUSES (vuses); i++)
4958 rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses, i), map);
4960 v_may_defs = V_MAY_DEF_OPS (ann);
4961 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
4963 rewrite_to_new_ssa_names_use
4964 (V_MAY_DEF_OP_PTR (v_may_defs, i), map);
4965 rewrite_to_new_ssa_names_def
4966 (V_MAY_DEF_RESULT_PTR (v_may_defs, i), stmt, map);
4969 v_must_defs = V_MUST_DEF_OPS (ann);
4970 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
4972 rewrite_to_new_ssa_names_def
4973 (V_MUST_DEF_RESULT_PTR (v_must_defs, i), stmt, map);
4974 rewrite_to_new_ssa_names_use
4975 (V_MUST_DEF_KILL_PTR (v_must_defs, i), map);
4979 FOR_EACH_EDGE (e, ei, bb->succs)
4980 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
4982 rewrite_to_new_ssa_names_use
4983 (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), map);
4985 if (e->flags & EDGE_ABNORMAL)
4987 tree op = PHI_ARG_DEF_FROM_EDGE (phi, e);
4988 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op) = 1;
4993 /* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
4994 by the mapping MAP. */
4996 void
4997 rewrite_to_new_ssa_names (basic_block *region, unsigned n_region, htab_t map)
4999 unsigned r;
5001 for (r = 0; r < n_region; r++)
5002 rewrite_to_new_ssa_names_bb (region[r], map);
5005 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5006 important exit edge EXIT. By important we mean that no SSA name defined
5007 inside region is live over the other exit edges of the region. All entry
5008 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5009 to the duplicate of the region. SSA form, dominance and loop information
5010 is updated. The new basic blocks are stored to REGION_COPY in the same
5011 order as they had in REGION, provided that REGION_COPY is not NULL.
5012 The function returns false if it is unable to copy the region,
5013 true otherwise. */
5015 bool
5016 tree_duplicate_sese_region (edge entry, edge exit,
5017 basic_block *region, unsigned n_region,
5018 basic_block *region_copy)
5020 unsigned i, n_doms, ver;
5021 bool free_region_copy = false, copying_header = false;
5022 struct loop *loop = entry->dest->loop_father;
5023 edge exit_copy;
5024 bitmap definitions;
5025 tree phi;
5026 basic_block *doms;
5027 htab_t ssa_name_map = NULL;
5028 edge redirected;
5029 bitmap_iterator bi;
5031 if (!can_copy_bbs_p (region, n_region))
5032 return false;
5034 /* Some sanity checking. Note that we do not check for all possible
5035 misuses of the functions. I.e. if you ask to copy something weird,
5036 it will work, but the state of structures probably will not be
5037 correct. */
5039 for (i = 0; i < n_region; i++)
5041 /* We do not handle subloops, i.e. all the blocks must belong to the
5042 same loop. */
5043 if (region[i]->loop_father != loop)
5044 return false;
5046 if (region[i] != entry->dest
5047 && region[i] == loop->header)
5048 return false;
5051 loop->copy = loop;
5053 /* In case the function is used for loop header copying (which is the primary
5054 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5055 if (loop->header == entry->dest)
5057 copying_header = true;
5058 loop->copy = loop->outer;
5060 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5061 return false;
5063 for (i = 0; i < n_region; i++)
5064 if (region[i] != exit->src
5065 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5066 return false;
5069 if (!region_copy)
5071 region_copy = xmalloc (sizeof (basic_block) * n_region);
5072 free_region_copy = true;
5075 gcc_assert (!any_marked_for_rewrite_p ());
5077 /* Record blocks outside the region that are duplicated by something
5078 inside. */
5079 doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
5080 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
5082 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
5083 definitions = marked_ssa_names ();
5085 if (copying_header)
5087 loop->header = exit->dest;
5088 loop->latch = exit->src;
5091 /* Redirect the entry and add the phi node arguments. */
5092 redirected = redirect_edge_and_branch (entry, entry->dest->rbi->copy);
5093 gcc_assert (redirected != NULL);
5094 flush_pending_stmts (entry);
5096 /* Concerning updating of dominators: we must recompute dominators
5097 for the entry block and its copy. Anything that is outside of the region,
5098 but was dominated by something inside it, needs recomputing as well. */
5099 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5100 doms[n_doms++] = entry->dest->rbi->original;
5101 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
5102 free (doms);
5104 /* Add the other phi node arguments. */
5105 add_phi_args_after_copy (region_copy, n_region);
5107 /* Add phi nodes for definitions at exit. TODO -- once we have immediate
5108 uses, it should be possible to emit phi nodes just for definitions that
5109 are used outside region. */
5110 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
5112 tree name = ssa_name (ver);
5114 phi = create_phi_node (name, exit->dest);
5115 add_phi_arg (phi, name, exit);
5116 add_phi_arg (phi, name, exit_copy);
5118 SSA_NAME_DEF_STMT (name) = phi;
5121 /* And create new definitions inside region and its copy. TODO -- once we
5122 have immediate uses, it might be better to leave definitions in region
5123 unchanged, create new ssa names for phi nodes on exit, and rewrite
5124 the uses, to avoid changing the copied region. */
5125 allocate_ssa_names (definitions, &ssa_name_map);
5126 rewrite_to_new_ssa_names (region, n_region, ssa_name_map);
5127 allocate_ssa_names (definitions, &ssa_name_map);
5128 rewrite_to_new_ssa_names (region_copy, n_region, ssa_name_map);
5129 htab_delete (ssa_name_map);
5131 if (free_region_copy)
5132 free (region_copy);
5134 unmark_all_for_rewrite ();
5135 BITMAP_FREE (definitions);
5137 return true;
5140 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
5142 void
5143 dump_function_to_file (tree fn, FILE *file, int flags)
5145 tree arg, vars, var;
5146 bool ignore_topmost_bind = false, any_var = false;
5147 basic_block bb;
5148 tree chain;
5150 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5152 arg = DECL_ARGUMENTS (fn);
5153 while (arg)
5155 print_generic_expr (file, arg, dump_flags);
5156 if (TREE_CHAIN (arg))
5157 fprintf (file, ", ");
5158 arg = TREE_CHAIN (arg);
5160 fprintf (file, ")\n");
5162 if (flags & TDF_RAW)
5164 dump_node (fn, TDF_SLIM | flags, file);
5165 return;
5168 /* When GIMPLE is lowered, the variables are no longer available in
5169 BIND_EXPRs, so display them separately. */
5170 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
5172 ignore_topmost_bind = true;
5174 fprintf (file, "{\n");
5175 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5177 var = TREE_VALUE (vars);
5179 print_generic_decl (file, var, flags);
5180 fprintf (file, "\n");
5182 any_var = true;
5186 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
5188 /* Make a CFG based dump. */
5189 check_bb_profile (ENTRY_BLOCK_PTR, file);
5190 if (!ignore_topmost_bind)
5191 fprintf (file, "{\n");
5193 if (any_var && n_basic_blocks)
5194 fprintf (file, "\n");
5196 FOR_EACH_BB (bb)
5197 dump_generic_bb (file, bb, 2, flags);
5199 fprintf (file, "}\n");
5200 check_bb_profile (EXIT_BLOCK_PTR, file);
5202 else
5204 int indent;
5206 /* Make a tree based dump. */
5207 chain = DECL_SAVED_TREE (fn);
5209 if (TREE_CODE (chain) == BIND_EXPR)
5211 if (ignore_topmost_bind)
5213 chain = BIND_EXPR_BODY (chain);
5214 indent = 2;
5216 else
5217 indent = 0;
5219 else
5221 if (!ignore_topmost_bind)
5222 fprintf (file, "{\n");
5223 indent = 2;
5226 if (any_var)
5227 fprintf (file, "\n");
5229 print_generic_stmt_indented (file, chain, flags, indent);
5230 if (ignore_topmost_bind)
5231 fprintf (file, "}\n");
5234 fprintf (file, "\n\n");
5238 /* Pretty print the intermediate representation of loops. */
5239 static void print_loop (FILE *, struct loop *, int);
5240 static void print_pred_bbs (FILE *, basic_block bb);
5241 static void print_succ_bbs (FILE *, basic_block bb);
5244 /* Print the predecessor indexes of basic block BB on FILE. */
5246 static void
5247 print_pred_bbs (FILE *file, basic_block bb)
5249 edge e;
5250 edge_iterator ei;
5252 FOR_EACH_EDGE (e, ei, bb->preds)
5253 fprintf (file, "bb_%d ", e->src->index);
5257 /* Print the successor indexes of basic block BB on FILE. */
5259 static void
5260 print_succ_bbs (FILE *file, basic_block bb)
5262 edge e;
5263 edge_iterator ei;
5265 FOR_EACH_EDGE (e, ei, bb->succs)
5266 fprintf (file, "bb_%d ", e->dest->index);
5270 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5272 static void
5273 print_loop (FILE *file, struct loop *loop, int indent)
5275 char *s_indent;
5276 basic_block bb;
5278 if (loop == NULL)
5279 return;
5281 s_indent = (char *) alloca ((size_t) indent + 1);
5282 memset ((void *) s_indent, ' ', (size_t) indent);
5283 s_indent[indent] = '\0';
5285 /* Print the loop's header. */
5286 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5288 /* Print the loop's body. */
5289 fprintf (file, "%s{\n", s_indent);
5290 FOR_EACH_BB (bb)
5291 if (bb->loop_father == loop)
5293 /* Print the basic_block's header. */
5294 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5295 print_pred_bbs (file, bb);
5296 fprintf (file, "}, succs = {");
5297 print_succ_bbs (file, bb);
5298 fprintf (file, "})\n");
5300 /* Print the basic_block's body. */
5301 fprintf (file, "%s {\n", s_indent);
5302 tree_dump_bb (bb, file, indent + 4);
5303 fprintf (file, "%s }\n", s_indent);
5306 print_loop (file, loop->inner, indent + 2);
5307 fprintf (file, "%s}\n", s_indent);
5308 print_loop (file, loop->next, indent);
5312 /* Follow a CFG edge from the entry point of the program, and on entry
5313 of a loop, pretty print the loop structure on FILE. */
5315 void
5316 print_loop_ir (FILE *file)
5318 basic_block bb;
5320 bb = BASIC_BLOCK (0);
5321 if (bb && bb->loop_father)
5322 print_loop (file, bb->loop_father, 0);
5326 /* Debug the loop structure at the tree level. */
5328 void
5329 debug_loop_ir (void)
5331 print_loop_ir (stderr);
5335 /* Return true if BB ends with a call, possibly followed by some
5336 instructions that must stay with the call. Return false
5337 otherwise. */
5339 static bool
5340 tree_block_ends_with_call_p (basic_block bb)
5342 block_stmt_iterator bsi = bsi_last (bb);
5343 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5347 /* Return true if BB ends with a conditional branch. Return false
5348 otherwise. */
5350 static bool
5351 tree_block_ends_with_condjump_p (basic_block bb)
5353 tree stmt = tsi_stmt (bsi_last (bb).tsi);
5354 return (TREE_CODE (stmt) == COND_EXPR);
5358 /* Return true if we need to add a fake edge to the exit block at statement T.
5359 Helper function for tree_flow_call_edges_add. */
5361 static bool
5362 need_fake_edge_p (tree t)
5364 tree call;
5366 /* NORETURN and LONGJMP calls already have an edge to exit.
5367 CONST and PURE calls do not need one.
5368 We don't currently check for CONST and PURE here, although
5369 it would be a good idea, because those attributes are
5370 figured out from the RTL in mark_constant_function, and
5371 the counter incrementation code from -fprofile-arcs
5372 leads to different results from -fbranch-probabilities. */
5373 call = get_call_expr_in (t);
5374 if (call
5375 && !(call_expr_flags (call) & ECF_NORETURN))
5376 return true;
5378 if (TREE_CODE (t) == ASM_EXPR
5379 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5380 return true;
5382 return false;
5386 /* Add fake edges to the function exit for any non-constant and
5387 non-noreturn calls, and for volatile inline assembly, in the bitmap of
5388 blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero. Return
5389 the number of blocks that were split.
5391 The goal is to expose cases in which entering a basic block does
5392 not imply that all subsequent instructions must be executed. */
5394 static int
5395 tree_flow_call_edges_add (sbitmap blocks)
5397 int i;
5398 int blocks_split = 0;
5399 int last_bb = last_basic_block;
5400 bool check_last_block = false;
5402 if (n_basic_blocks == 0)
5403 return 0;
5405 if (! blocks)
5406 check_last_block = true;
5407 else
5408 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5410 /* In the last basic block, before epilogue generation, there will be
5411 a fallthru edge to EXIT. Special care is required if the last insn
5412 of the last basic block is a call because make_edge folds duplicate
5413 edges, which would result in the fallthru edge also being marked
5414 fake, which would result in the fallthru edge being removed by
5415 remove_fake_edges, which would result in an invalid CFG.
5417 Moreover, we can't elide the outgoing fake edge, since the block
5418 profiler needs to take this into account in order to solve the minimal
5419 spanning tree in the case that the call doesn't return.
5421 Handle this by adding a dummy instruction in a new last basic block. */
5422 if (check_last_block)
5424 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5425 block_stmt_iterator bsi = bsi_last (bb);
5426 tree t = NULL_TREE;
5427 if (!bsi_end_p (bsi))
5428 t = bsi_stmt (bsi);
5430 if (need_fake_edge_p (t))
5432 edge e;
5434 e = find_edge (bb, EXIT_BLOCK_PTR);
5435 if (e)
5437 bsi_insert_on_edge (e, build_empty_stmt ());
5438 bsi_commit_edge_inserts ();
5443 /* Now add fake edges to the function exit for any non-constant
5444 calls, since there is no way that we can determine if they will
5445 return or not... */
5446 for (i = 0; i < last_bb; i++)
5448 basic_block bb = BASIC_BLOCK (i);
5449 block_stmt_iterator bsi;
5450 tree stmt, last_stmt;
5452 if (!bb)
5453 continue;
5455 if (blocks && !TEST_BIT (blocks, i))
5456 continue;
5458 bsi = bsi_last (bb);
5459 if (!bsi_end_p (bsi))
5461 last_stmt = bsi_stmt (bsi);
5464 stmt = bsi_stmt (bsi);
5465 if (need_fake_edge_p (stmt))
5467 edge e;
5468 /* The handling above of the final block before the
5469 epilogue should be enough to verify that there is
5470 no edge to the exit block in the CFG already.
5471 Calling make_edge in such a case would cause us to
5472 mark that edge as fake and remove it later. */
5473 #ifdef ENABLE_CHECKING
5474 if (stmt == last_stmt)
5476 e = find_edge (bb, EXIT_BLOCK_PTR);
5477 gcc_assert (e == NULL);
5479 #endif
5481 /* Note that the following may create a new basic block
5482 and renumber the existing basic blocks. */
5483 if (stmt != last_stmt)
5485 e = split_block (bb, stmt);
5486 if (e)
5487 blocks_split++;
5489 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5491 bsi_prev (&bsi);
5493 while (!bsi_end_p (bsi));
5497 if (blocks_split)
5498 verify_flow_info ();
5500 return blocks_split;
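/* Remove the outgoing EH edges of basic block BB if its last statement
can no longer throw internally. Return true if any edges were removed,
in which case the dominance info is freed as well. */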
5503 bool
5504 tree_purge_dead_eh_edges (basic_block bb)
5506 bool changed = false;
5507 edge e;
5508 edge_iterator ei;
5509 tree stmt = last_stmt (bb);
5511 if (stmt && tree_can_throw_internal (stmt))
5512 return false;
5514 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5516 if (e->flags & EDGE_EH)
5518 remove_edge (e);
5519 changed = true;
5521 else
5522 ei_next (&ei);
5525 /* Removal of dead EH edges might change dominators of not
5526 just immediate successors. E.g. when bb1 is changed so that
5527 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5528 eh edges purged by this function in:
5532 1-->2
5533 / \ |
5534 v v |
5535 3-->4 |
5537 --->5
5540 idom(bb5) must be recomputed. For now just free the dominance
5541 info. */
5542 if (changed)
5543 free_dominance_info (CDI_DOMINATORS);
5545 return changed;
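/* Purge dead EH edges from every basic block whose index is set in the
bitmap BLOCKS. Return true if any edges were removed. */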
5548 bool
5549 tree_purge_all_dead_eh_edges (bitmap blocks)
5551 bool changed = false;
5552 unsigned i;
5553 bitmap_iterator bi;
5555 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5557 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5560 return changed;
5563 /* This function is called whenever a new edge is created or
5564 redirected. */
5566 static void
5567 tree_execute_on_growing_pred (edge e)
5569 basic_block bb = e->dest;
5571 if (phi_nodes (bb))
5572 reserve_phi_args_for_new_edge (bb);
5575 /* This function is called immediately before edge E is removed from
5576 the edge vector E->dest->preds. */
5578 static void
5579 tree_execute_on_shrinking_pred (edge e)
5581 if (phi_nodes (e->dest))
5582 remove_phi_args (e);
5585 /*---------------------------------------------------------------------------
5586 Helper functions for Loop versioning
5587 ---------------------------------------------------------------------------*/
5589 /* Adjust phi nodes for the 'first' basic block. The 'second' basic block is
5590 a copy of 'first'. Both of them are dominated by the 'new_head' basic block.
5591 When 'new_head' was created by splitting 'second's incoming edge, it received
5592 phi arguments on that edge from split_edge(). Later, an additional edge 'e'
5593 was created to connect 'new_head' and 'first'. This routine now adds, on the
5594 additional edge 'e', the phi args that the edge from 'new_head' to 'second'
5595 received as part of the edge splitting. */
5598 static void
5599 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5600 basic_block new_head, edge e)
5602 tree phi1, phi2;
5604 /* Walk all phi nodes of the 'second' basic block and add phi args on
5605 edge 'e' for the 'first' head. PHI args are always in the correct order. */
5607 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5608 phi2 && phi1;
5609 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5611 edge e2 = find_edge (new_head, second);
5613 if (e2)
5615 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5616 add_phi_arg (phi1, def, e);
5621 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
5622 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
5623 the destination of the ELSE part. */
5624 static void
5625 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5626 basic_block cond_bb, void *cond_e)
5628 block_stmt_iterator bsi;
5629 tree goto1 = NULL_TREE;
5630 tree goto2 = NULL_TREE;
5631 tree new_cond_expr = NULL_TREE;
5632 tree cond_expr = (tree) cond_e;
5633 edge e0;
5635 /* Build new conditional expr */
5636 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5637 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5638 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5640 /* Add new cond in cond_bb. */
5641 bsi = bsi_start (cond_bb);
5642 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5643 /* Adjust edges appropriately to connect new head with first head
5644 as well as second head. */
5645 e0 = single_succ_edge (cond_bb);
5646 e0->flags &= ~EDGE_FALLTHRU;
5647 e0->flags |= EDGE_FALSE_VALUE;
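/* The CFG hooks used when the intermediate language is GIMPLE trees. */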
5650 struct cfg_hooks tree_cfg_hooks = {
5651 "tree",
5652 tree_verify_flow_info,
5653 tree_dump_bb, /* dump_bb */
5654 create_bb, /* create_basic_block */
5655 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5656 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5657 remove_bb, /* delete_basic_block */
5658 tree_split_block, /* split_block */
5659 tree_move_block_after, /* move_block_after */
5660 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5661 tree_merge_blocks, /* merge_blocks */
5662 tree_predict_edge, /* predict_edge */
5663 tree_predicted_by_p, /* predicted_by_p */
5664 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5665 tree_duplicate_bb, /* duplicate_block */
5666 tree_split_edge, /* split_edge */
5667 tree_make_forwarder_block, /* make_forwarder_block */
5668 NULL, /* tidy_fallthru_edge */
5669 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5670 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5671 tree_flow_call_edges_add, /* flow_call_edges_add */
5672 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5673 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5674 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5675 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5676 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
5677 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5678 flush_pending_stmts /* flush_pending_stmts */
5682 /* Split all critical edges. */
5684 static void
5685 split_critical_edges (void)
5687 basic_block bb;
5688 edge e;
5689 edge_iterator ei;
5691 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5692 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5693 mappings around the calls to split_edge. */
5694 start_recording_case_labels ();
5695 FOR_ALL_BB (bb)
5697 FOR_EACH_EDGE (e, ei, bb->succs)
5698 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5700 split_edge (e);
5703 end_recording_case_labels ();
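/* Pass descriptor for splitting critical edges. It requires a CFG and
provides PROP_no_crit_edges. */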
5706 struct tree_opt_pass pass_split_crit_edges =
5708 "crited", /* name */
5709 NULL, /* gate */
5710 split_critical_edges, /* execute */
5711 NULL, /* sub */
5712 NULL, /* next */
5713 0, /* static_pass_number */
5714 TV_TREE_SPLIT_EDGES, /* tv_id */
5715 PROP_cfg, /* properties_required */
5716 PROP_no_crit_edges, /* properties_provided */
5717 0, /* properties_destroyed */
5718 0, /* todo_flags_start */
5719 TODO_dump_func, /* todo_flags_finish */
5720 0 /* letter */
5724 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5725 a temporary, register the temporary to be renamed if necessary,
5726 and finally return the temporary. Put the statements to compute
5727 EXP before the current statement in BSI. */
5729 tree
5730 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5732 tree t, new_stmt, orig_stmt;
5734 if (is_gimple_val (exp))
5735 return exp;
5737 t = make_rename_temp (type, NULL);
5738 new_stmt = build (MODIFY_EXPR, type, t, exp);
5740 orig_stmt = bsi_stmt (*bsi);
5741 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5742 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5744 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5746 return t;
5749 /* Build a ternary operation and gimplify it. Emit code before BSI.
5750 Return the gimple_val holding the result. */
5752 tree
5753 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5754 tree type, tree a, tree b, tree c)
5756 tree ret;
5758 ret = fold (build3 (code, type, a, b, c));
5759 STRIP_NOPS (ret);
5761 return gimplify_val (bsi, type, ret);
5764 /* Build a binary operation and gimplify it. Emit code before BSI.
5765 Return the gimple_val holding the result. */
5767 tree
5768 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5769 tree type, tree a, tree b)
5771 tree ret;
5773 ret = fold (build2 (code, type, a, b));
5774 STRIP_NOPS (ret);
5776 return gimplify_val (bsi, type, ret);
5779 /* Build a unary operation and gimplify it. Emit code before BSI.
5780 Return the gimple_val holding the result. */
5782 tree
5783 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5784 tree a)
5786 tree ret;
5788 ret = fold (build1 (code, type, a));
5789 STRIP_NOPS (ret);
5791 return gimplify_val (bsi, type, ret);
5796 /* Emit return warnings. */
5798 static void
5799 execute_warn_function_return (void)
5801 #ifdef USE_MAPPED_LOCATION
5802 source_location location;
5803 #else
5804 location_t *locus;
5805 #endif
5806 tree last;
5807 edge e;
5808 edge_iterator ei;
5810 if (warn_missing_noreturn
5811 && !TREE_THIS_VOLATILE (cfun->decl)
5812 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5813 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5814 warning ("%Jfunction might be possible candidate for "
5815 "attribute %<noreturn%>",
5816 cfun->decl);
5818 /* If we have a path to EXIT, then we do return. */
5819 if (TREE_THIS_VOLATILE (cfun->decl)
5820 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5822 #ifdef USE_MAPPED_LOCATION
5823 location = UNKNOWN_LOCATION;
5824 #else
5825 locus = NULL;
5826 #endif
5827 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5829 last = last_stmt (e->src);
5830 if (TREE_CODE (last) == RETURN_EXPR
5831 #ifdef USE_MAPPED_LOCATION
5832 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5833 #else
5834 && (locus = EXPR_LOCUS (last)) != NULL)
5835 #endif
5836 break;
5838 #ifdef USE_MAPPED_LOCATION
5839 if (location == UNKNOWN_LOCATION)
5840 location = cfun->function_end_locus;
5841 warning ("%H%<noreturn%> function does return", &location);
5842 #else
5843 if (!locus)
5844 locus = &cfun->function_end_locus;
5845 warning ("%H%<noreturn%> function does return", locus);
5846 #endif
5849 /* If we see "return;" in some basic block, then we do reach the end
5850 without returning a value. */
5851 else if (warn_return_type
5852 && !TREE_NO_WARNING (cfun->decl)
5853 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5854 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5856 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5858 tree last = last_stmt (e->src);
5859 if (TREE_CODE (last) == RETURN_EXPR
5860 && TREE_OPERAND (last, 0) == NULL)
5862 #ifdef USE_MAPPED_LOCATION
5863 location = EXPR_LOCATION (last);
5864 if (location == UNKNOWN_LOCATION)
5865 location = cfun->function_end_locus;
5866 warning ("%Hcontrol reaches end of non-void function", &location);
5867 #else
5868 locus = EXPR_LOCUS (last);
5869 if (!locus)
5870 locus = &cfun->function_end_locus;
5871 warning ("%Hcontrol reaches end of non-void function", locus);
5872 #endif
5873 TREE_NO_WARNING (cfun->decl) = 1;
5874 break;
5881 /* Given a basic block B which ends with a conditional and has
5882 precisely two successors, determine which of the edges is taken if
5883 the conditional is true and which is taken if the conditional is
5884 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5886 void
5887 extract_true_false_edges_from_block (basic_block b,
5888 edge *true_edge,
5889 edge *false_edge)
5891 edge e = EDGE_SUCC (b, 0);
5893 if (e->flags & EDGE_TRUE_VALUE)
5895 *true_edge = e;
5896 *false_edge = EDGE_SUCC (b, 1);
5898 else
5900 *false_edge = e;
5901 *true_edge = EDGE_SUCC (b, 1);
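/* Pass descriptor for emitting the return warnings above; it requires
only a CFG and has no gate. */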
5905 struct tree_opt_pass pass_warn_function_return =
5907 NULL, /* name */
5908 NULL, /* gate */
5909 execute_warn_function_return, /* execute */
5910 NULL, /* sub */
5911 NULL, /* next */
5912 0, /* static_pass_number */
5913 0, /* tv_id */
5914 PROP_cfg, /* properties_required */
5915 0, /* properties_provided */
5916 0, /* properties_destroyed */
5917 0, /* todo_flags_start */
5918 0, /* todo_flags_finish */
5919 0 /* letter */