1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "errors.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
52 /* Local declarations. */
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
57 /* Mapping of labels to their associated blocks. This can greatly speed up
58 building of the CFG in code with lots of gotos. */
59 static GTY(()) varray_type label_to_block_map;
61 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
62 which use a particular edge. The CASE_LABEL_EXPRs are chained together
63 via their TREE_CHAIN field, which we clear after we're done with the
64 hash table to prevent problems with duplication of SWITCH_EXPRs.
66 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
67 update the case vector in response to edge redirections.
69 Right now this table is set up and torn down at key points in the
70 compilation process. It would be nice if we could make the table
71 more persistent. The key is getting notification of changes to
72 the CFG (particularly edge removal, creation and redirection). */
74 struct edge_to_cases_elt
76 /* The edge itself. Necessary for hashing and equality tests. */
77 edge e;
79 /* The case labels associated with this edge. We link these up via
80 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
81 when we destroy the hash table. This prevents problems when copying
82 SWITCH_EXPRs. */
83 tree case_labels;
86 static htab_t edge_to_cases;
88 /* CFG statistics. */
89 struct cfg_stats_d
91 long num_merged_labels;
94 static struct cfg_stats_d cfg_stats;
96 /* Nonzero if we found a computed goto while building basic blocks. */
97 static bool found_computed_goto;
99 /* Basic blocks and flowgraphs. */
100 static basic_block create_bb (void *, void *, basic_block);
101 static void create_block_annotation (basic_block);
102 static void free_blocks_annotations (void);
103 static void clear_blocks_annotations (void);
104 static void make_blocks (tree);
105 static void factor_computed_gotos (void);
107 /* Edges. */
108 static void make_edges (void);
109 static void make_ctrl_stmt_edges (basic_block);
110 static void make_exit_edges (basic_block);
111 static void make_cond_expr_edges (basic_block);
112 static void make_switch_expr_edges (basic_block);
113 static void make_goto_expr_edges (basic_block);
114 static edge tree_redirect_edge_and_branch (edge, basic_block);
115 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
116 static void split_critical_edges (void);
118 /* Various helpers. */
119 static inline bool stmt_starts_bb_p (tree, tree);
120 static int tree_verify_flow_info (void);
121 static void tree_make_forwarder_block (edge);
122 static bool tree_forwarder_block_p (basic_block);
123 static void tree_cfg2vcg (FILE *);
125 /* Flowgraph optimization and cleanup. */
126 static void tree_merge_blocks (basic_block, basic_block);
127 static bool tree_can_merge_blocks_p (basic_block, basic_block);
128 static void remove_bb (basic_block);
129 static bool cleanup_control_flow (void);
130 static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
131 static edge find_taken_edge_cond_expr (basic_block, tree);
132 static edge find_taken_edge_switch_expr (basic_block, tree);
133 static tree find_case_label_for_value (tree, tree);
134 static bool phi_alternatives_equal (basic_block, edge, edge);
135 static bool cleanup_forwarder_blocks (void);
138 /*---------------------------------------------------------------------------
139 Create basic blocks
140 ---------------------------------------------------------------------------*/
142 /* Entry point to the CFG builder for trees. TP points to the list of
143 statements to be added to the flowgraph. */
145 static void
146 build_tree_cfg (tree *tp)
148 /* Register specific tree functions. */
149 tree_register_cfg_hooks ();
151 /* Initialize rbi_pool. */
152 alloc_rbi_pool ();
154 /* Initialize the basic block array. */
155 init_flow ();
156 profile_status = PROFILE_ABSENT;
157 n_basic_blocks = 0;
158 last_basic_block = 0;
159 VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
160 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
162 /* Build a mapping of labels to their associated blocks. */
163 VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
164 "label to block map");
166 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
167 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
169 found_computed_goto = 0;
170 make_blocks (*tp);
172 /* Computed gotos are hell to deal with, especially if there are
173 lots of them with a large number of destinations. So we factor
174 them to a common computed goto location before we build the
175 edge list. After we convert back to normal form, we will un-factor
176 the computed gotos since factoring introduces an unwanted jump. */
177 if (found_computed_goto)
178 factor_computed_gotos ();
180 /* Make sure there is always at least one block, even if it's empty. */
181 if (n_basic_blocks == 0)
182 create_empty_bb (ENTRY_BLOCK_PTR);
184 create_block_annotation (ENTRY_BLOCK_PTR);
185 create_block_annotation (EXIT_BLOCK_PTR);
187 /* Adjust the size of the array. */
188 VARRAY_GROW (basic_block_info, n_basic_blocks);
190 /* To speed up statement iterator walks, we first purge dead labels. */
191 cleanup_dead_labels ();
193 /* Group case nodes to reduce the number of edges.
194 We do this after cleaning up dead labels because otherwise we miss
195 a lot of obvious case merging opportunities. */
196 group_case_labels ();
198 /* Create the edges of the flowgraph. */
199 make_edges ();
201 /* Debugging dumps. */
203 /* Write the flowgraph to a VCG file. */
205 int local_dump_flags;
206 FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
207 if (dump_file)
209 tree_cfg2vcg (dump_file);
210 dump_end (TDI_vcg, dump_file);
214 /* Dump a textual representation of the flowgraph. */
215 if (dump_file)
216 dump_tree_cfg (dump_file, dump_flags);
219 static void
220 execute_build_cfg (void)
222 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
225 struct tree_opt_pass pass_build_cfg =
227 "cfg", /* name */
228 NULL, /* gate */
229 execute_build_cfg, /* execute */
230 NULL, /* sub */
231 NULL, /* next */
232 0, /* static_pass_number */
233 TV_TREE_CFG, /* tv_id */
234 PROP_gimple_leh, /* properties_required */
235 PROP_cfg, /* properties_provided */
236 0, /* properties_destroyed */
237 0, /* todo_flags_start */
238 TODO_verify_stmts, /* todo_flags_finish */
239 0 /* letter */
242 /* Search the CFG for any computed gotos. If found, factor them to a
243 common computed goto site. Also record the location of that site so
244 that we can un-factor the gotos after we have converted back to
245 normal form. */
247 static void
248 factor_computed_gotos (void)
250 basic_block bb;
251 tree factored_label_decl = NULL;
252 tree var = NULL;
253 tree factored_computed_goto_label = NULL;
254 tree factored_computed_goto = NULL;
256 /* We know there are one or more computed gotos in this function.
257 Examine the last statement in each basic block to see if the block
258 ends with a computed goto. */
260 FOR_EACH_BB (bb)
262 block_stmt_iterator bsi = bsi_last (bb);
263 tree last;
265 if (bsi_end_p (bsi))
266 continue;
267 last = bsi_stmt (bsi);
269 /* Ignore the computed goto we create when we factor the original
270 computed gotos. */
271 if (last == factored_computed_goto)
272 continue;
274 /* If the last statement is a computed goto, factor it. */
275 if (computed_goto_p (last))
277 tree assignment;
279 /* The first time we find a computed goto we need to create
280 the factored goto block and the variable each original
281 computed goto will use for their goto destination. */
282 if (! factored_computed_goto)
284 basic_block new_bb = create_empty_bb (bb);
285 block_stmt_iterator new_bsi = bsi_start (new_bb);
287 /* Create the destination of the factored goto. Each original
288 computed goto will put its desired destination into this
289 variable and jump to the label we create immediately
290 below. */
291 var = create_tmp_var (ptr_type_node, "gotovar");
293 /* Build a label for the new block which will contain the
294 factored computed goto. */
295 factored_label_decl = create_artificial_label ();
296 factored_computed_goto_label
297 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
298 bsi_insert_after (&new_bsi, factored_computed_goto_label,
299 BSI_NEW_STMT);
301 /* Build our new computed goto. */
302 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
303 bsi_insert_after (&new_bsi, factored_computed_goto,
304 BSI_NEW_STMT);
307 /* Copy the original computed goto's destination into VAR. */
308 assignment = build (MODIFY_EXPR, ptr_type_node,
309 var, GOTO_DESTINATION (last));
310 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
312 /* And re-vector the computed goto to the new destination. */
313 GOTO_DESTINATION (last) = factored_label_decl;
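/* For illustration (a sketch, not literal dump output; the names P, Q and
   the ".N" suffix are made up): after the factoring above, a function
   containing

       goto *p;
       ...
       goto *q;

   is rewritten along the lines of

       gotovar.N = p;
       goto <factored_label>;
       ...
       gotovar.N = q;
       goto <factored_label>;
       ...
     <factored_label>:
       goto *gotovar.N;

   so that only the single factored block needs an abnormal edge to every
   potential computed-goto target, rather than every original goto.  */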
319 /* Create annotations for a single basic block. */
321 static void
322 create_block_annotation (basic_block bb)
324 /* Verify that the tree_annotations field is clear. */
325 gcc_assert (!bb->tree_annotations);
326 bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
330 /* Free the annotations for all the basic blocks. */
332 static void free_blocks_annotations (void)
334 clear_blocks_annotations ();
338 /* Clear the annotations for all the basic blocks. */
340 static void
341 clear_blocks_annotations (void)
343 basic_block bb;
345 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
346 bb->tree_annotations = NULL;
350 /* Build a flowgraph for the statement_list STMT_LIST. */
352 static void
353 make_blocks (tree stmt_list)
355 tree_stmt_iterator i = tsi_start (stmt_list);
356 tree stmt = NULL;
357 bool start_new_block = true;
358 bool first_stmt_of_list = true;
359 basic_block bb = ENTRY_BLOCK_PTR;
361 while (!tsi_end_p (i))
363 tree prev_stmt;
365 prev_stmt = stmt;
366 stmt = tsi_stmt (i);
368 /* If the statement starts a new basic block or if we have determined
369 in a previous pass that we need to create a new block for STMT, do
370 so now. */
371 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
373 if (!first_stmt_of_list)
374 stmt_list = tsi_split_statement_list_before (&i);
375 bb = create_basic_block (stmt_list, NULL, bb);
376 start_new_block = false;
379 /* Now add STMT to BB and create the subgraphs for special statement
380 codes. */
381 set_bb_for_stmt (stmt, bb);
383 if (computed_goto_p (stmt))
384 found_computed_goto = true;
386 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
387 next iteration. */
388 if (stmt_ends_bb_p (stmt))
389 start_new_block = true;
391 tsi_next (&i);
392 first_stmt_of_list = false;
397 /* Create and return a new empty basic block after bb AFTER. */
399 static basic_block
400 create_bb (void *h, void *e, basic_block after)
402 basic_block bb;
404 gcc_assert (!e);
406 /* Create and initialize a new basic block. Since alloc_block uses
407 ggc_alloc_cleared to allocate a basic block, we do not have to
408 clear the newly allocated basic block here. */
409 bb = alloc_block ();
411 bb->index = last_basic_block;
412 bb->flags = BB_NEW;
413 bb->stmt_list = h ? h : alloc_stmt_list ();
415 /* Add the new block to the linked list of blocks. */
416 link_block (bb, after);
418 /* Grow the basic block array if needed. */
419 if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
421 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
422 VARRAY_GROW (basic_block_info, new_size);
425 /* Add the newly created block to the array. */
426 BASIC_BLOCK (last_basic_block) = bb;
428 create_block_annotation (bb);
430 n_basic_blocks++;
431 last_basic_block++;
433 initialize_bb_rbi (bb);
434 return bb;
438 /*---------------------------------------------------------------------------
439 Edge creation
440 ---------------------------------------------------------------------------*/
442 /* Join all the blocks in the flowgraph. */
444 static void
445 make_edges (void)
447 basic_block bb;
449 /* Create an edge from entry to the first block with executable
450 statements in it. */
451 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
453 /* Traverse the basic block array placing edges. */
454 FOR_EACH_BB (bb)
456 tree first = first_stmt (bb);
457 tree last = last_stmt (bb);
459 if (first)
461 /* Edges for statements that always alter flow control. */
462 if (is_ctrl_stmt (last))
463 make_ctrl_stmt_edges (bb);
465 /* Edges for statements that sometimes alter flow control. */
466 if (is_ctrl_altering_stmt (last))
467 make_exit_edges (bb);
470 /* Finally, if no edges were created above, this is a regular
471 basic block that only needs a fallthru edge. */
472 if (EDGE_COUNT (bb->succs) == 0)
473 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
476 /* We do not care about fake edges, so remove any that the CFG
477 builder inserted for completeness. */
478 remove_fake_exit_edges ();
480 /* Clean up the graph and warn for unreachable code. */
481 cleanup_tree_cfg ();
485 /* Create edges for control statement at basic block BB. */
487 static void
488 make_ctrl_stmt_edges (basic_block bb)
490 tree last = last_stmt (bb);
492 gcc_assert (last);
493 switch (TREE_CODE (last))
495 case GOTO_EXPR:
496 make_goto_expr_edges (bb);
497 break;
499 case RETURN_EXPR:
500 make_edge (bb, EXIT_BLOCK_PTR, 0);
501 break;
503 case COND_EXPR:
504 make_cond_expr_edges (bb);
505 break;
507 case SWITCH_EXPR:
508 make_switch_expr_edges (bb);
509 break;
511 case RESX_EXPR:
512 make_eh_edges (last);
513 /* Yet another NORETURN hack. */
514 if (EDGE_COUNT (bb->succs) == 0)
515 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
516 break;
518 default:
519 gcc_unreachable ();
524 /* Create exit edges for statements in block BB that alter the flow of
525 control. Statements that alter the control flow are 'goto', 'return'
526 and calls to non-returning functions. */
528 static void
529 make_exit_edges (basic_block bb)
531 tree last = last_stmt (bb), op;
533 gcc_assert (last);
534 switch (TREE_CODE (last))
536 case CALL_EXPR:
537 /* If this function receives a nonlocal goto, then we need to
538 make edges from this call site to all the nonlocal goto
539 handlers. */
540 if (TREE_SIDE_EFFECTS (last)
541 && current_function_has_nonlocal_label)
542 make_goto_expr_edges (bb);
544 /* If this statement has reachable exception handlers, then
545 create abnormal edges to them. */
546 make_eh_edges (last);
548 /* Some calls are known not to return. For such calls we create
549 a fake edge.
551 We really need to revamp how we build edges so that it's not
552 such a bloody pain to avoid creating edges for this case since
553 all we do is remove these edges when we're done building the
554 CFG. */
555 if (call_expr_flags (last) & ECF_NORETURN)
557 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
558 return;
561 /* Don't forget the fall-thru edge. */
562 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
563 break;
565 case MODIFY_EXPR:
566 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
567 may have an abnormal edge. Search the RHS for this case and
568 create any required edges. */
569 op = get_call_expr_in (last);
570 if (op && TREE_SIDE_EFFECTS (op)
571 && current_function_has_nonlocal_label)
572 make_goto_expr_edges (bb);
574 make_eh_edges (last);
575 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
576 break;
578 default:
579 gcc_unreachable ();
584 /* Create the edges for a COND_EXPR starting at block BB.
585 At this point, both clauses must contain only simple gotos. */
587 static void
588 make_cond_expr_edges (basic_block bb)
590 tree entry = last_stmt (bb);
591 basic_block then_bb, else_bb;
592 tree then_label, else_label;
594 gcc_assert (entry);
595 gcc_assert (TREE_CODE (entry) == COND_EXPR);
597 /* Entry basic blocks for each component. */
598 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
599 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
600 then_bb = label_to_block (then_label);
601 else_bb = label_to_block (else_label);
603 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
604 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
607 /* Hashing routine for EDGE_TO_CASES. */
609 static hashval_t
610 edge_to_cases_hash (const void *p)
612 edge e = ((struct edge_to_cases_elt *)p)->e;
614 /* Hash on the edge itself (which is a pointer). */
615 return htab_hash_pointer (e);
618 /* Equality routine for EDGE_TO_CASES.  Edges are unique, so testing
619 for equality is just a pointer comparison. */
621 static int
622 edge_to_cases_eq (const void *p1, const void *p2)
624 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
625 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
627 return e1 == e2;
630 /* Called for each element in the hash table (P) as we delete the
631 edge to cases hash table.
633 Clear all the TREE_CHAINs to prevent problems with copying of
634 SWITCH_EXPRs and structure sharing rules, then free the hash table
635 element. */
637 static void
638 edge_to_cases_cleanup (void *p)
640 struct edge_to_cases_elt *elt = p;
641 tree t, next;
643 for (t = elt->case_labels; t; t = next)
645 next = TREE_CHAIN (t);
646 TREE_CHAIN (t) = NULL;
648 free (p);
651 /* Start recording information mapping edges to case labels. */
653 static void
654 start_recording_case_labels (void)
656 gcc_assert (edge_to_cases == NULL);
658 edge_to_cases = htab_create (37,
659 edge_to_cases_hash,
660 edge_to_cases_eq,
661 edge_to_cases_cleanup);
664 /* Return nonzero if we are recording information for case labels. */
666 static bool
667 recording_case_labels_p (void)
669 return (edge_to_cases != NULL);
672 /* Stop recording information mapping edges to case labels and
673 remove any information we have recorded. */
674 static void
675 end_recording_case_labels (void)
677 htab_delete (edge_to_cases);
678 edge_to_cases = NULL;
681 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
683 static void
684 record_switch_edge (edge e, tree case_label)
686 struct edge_to_cases_elt *elt;
687 void **slot;
689 /* Build a hash table element so we can see if E is already
690 in the table. */
691 elt = xmalloc (sizeof (struct edge_to_cases_elt));
692 elt->e = e;
693 elt->case_labels = case_label;
695 slot = htab_find_slot (edge_to_cases, elt, INSERT);
697 if (*slot == NULL)
699 /* E was not in the hash table. Install E into the hash table. */
700 *slot = (void *)elt;
702 else
704 /* E was already in the hash table. Free ELT as we do not need it
705 anymore. */
706 free (elt);
708 /* Get the entry stored in the hash table. */
709 elt = (struct edge_to_cases_elt *) *slot;
711 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
712 TREE_CHAIN (case_label) = elt->case_labels;
713 elt->case_labels = case_label;
717 /* If we are inside a {start,end}_recording_cases block, then return
718 a chain of CASE_LABEL_EXPRs from T which reference E.
720 Otherwise return NULL. */
722 static tree
723 get_cases_for_edge (edge e, tree t)
725 struct edge_to_cases_elt elt, *elt_p;
726 void **slot;
727 size_t i, n;
728 tree vec;
730 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
731 chains available. Return NULL so the caller can detect this case. */
732 if (!recording_case_labels_p ())
733 return NULL;
735 restart:
736 elt.e = e;
737 elt.case_labels = NULL;
738 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
740 if (slot)
742 elt_p = (struct edge_to_cases_elt *)*slot;
743 return elt_p->case_labels;
746 /* If we did not find E in the hash table, then this must be the first
747 time we have been queried for information about E & T. Add all the
748 elements from T to the hash table then perform the query again. */
750 vec = SWITCH_LABELS (t);
751 n = TREE_VEC_LENGTH (vec);
752 for (i = 0; i < n; i++)
754 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
755 basic_block label_bb = label_to_block (lab);
756 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
758 goto restart;
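/* A sketch (not existing GCC code) of how a caller is expected to use
   the mapping above: bracket the work with the start/end functions and
   walk the TREE_CHAIN of CASE_LABEL_EXPRs returned for an edge.
   cleanup_tree_cfg below wraps cleanup_forwarder_blocks in the same
   start/end pair.  The function name and the NEW_LABEL parameter here
   are hypothetical.  */

static void
retarget_edge_cases_sketch (edge e, tree switch_stmt, tree new_label)
{
  tree t;

  start_recording_case_labels ();

  /* Point every CASE_LABEL_EXPR that currently targets E at NEW_LABEL.  */
  for (t = get_cases_for_edge (e, switch_stmt); t; t = TREE_CHAIN (t))
    CASE_LABEL (t) = new_label;

  end_recording_case_labels ();
}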
761 /* Create the edges for a SWITCH_EXPR starting at block BB.
762 At this point, the switch body has been lowered and the
763 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
765 static void
766 make_switch_expr_edges (basic_block bb)
768 tree entry = last_stmt (bb);
769 size_t i, n;
770 tree vec;
772 vec = SWITCH_LABELS (entry);
773 n = TREE_VEC_LENGTH (vec);
775 for (i = 0; i < n; ++i)
777 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
778 basic_block label_bb = label_to_block (lab);
779 make_edge (bb, label_bb, 0);
784 /* Return the basic block holding label DEST. */
786 basic_block
787 label_to_block (tree dest)
789 int uid = LABEL_DECL_UID (dest);
791 /* We would die hard when faced with an undefined label.  Emit a label to
792 the very first basic block.  This will hopefully make even the dataflow
793 and undefined variable warnings come out reasonably right.  */
794 if ((errorcount || sorrycount) && uid < 0)
796 block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
797 tree stmt;
799 stmt = build1 (LABEL_EXPR, void_type_node, dest);
800 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
801 uid = LABEL_DECL_UID (dest);
803 return VARRAY_BB (label_to_block_map, uid);
807 /* Create edges for a goto statement at block BB. */
809 static void
810 make_goto_expr_edges (basic_block bb)
812 tree goto_t, dest;
813 basic_block target_bb;
814 int for_call;
815 block_stmt_iterator last = bsi_last (bb);
817 goto_t = bsi_stmt (last);
819 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
820 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
821 from a nonlocal goto. */
822 if (TREE_CODE (goto_t) != GOTO_EXPR)
824 dest = error_mark_node;
825 for_call = 1;
827 else
829 dest = GOTO_DESTINATION (goto_t);
830 for_call = 0;
832 /* A GOTO to a local label creates normal edges. */
833 if (simple_goto_p (goto_t))
835 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
836 #ifdef USE_MAPPED_LOCATION
837 e->goto_locus = EXPR_LOCATION (goto_t);
838 #else
839 e->goto_locus = EXPR_LOCUS (goto_t);
840 #endif
841 bsi_remove (&last);
842 return;
845 /* Nothing more to do for nonlocal gotos. */
846 if (TREE_CODE (dest) == LABEL_DECL)
847 return;
849 /* Computed gotos remain. */
852 /* Look for the block starting with the destination label. In the
853 case of a computed goto, make an edge to any label block we find
854 in the CFG. */
855 FOR_EACH_BB (target_bb)
857 block_stmt_iterator bsi;
859 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
861 tree target = bsi_stmt (bsi);
863 if (TREE_CODE (target) != LABEL_EXPR)
864 break;
866 if (
867 /* Computed GOTOs. Make an edge to every label block that has
868 been marked as a potential target for a computed goto. */
869 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
870 /* Nonlocal GOTO target. Make an edge to every label block
871 that has been marked as a potential target for a nonlocal
872 goto. */
873 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
875 make_edge (bb, target_bb, EDGE_ABNORMAL);
876 break;
881 /* Degenerate case of computed goto with no labels. */
882 if (!for_call && EDGE_COUNT (bb->succs) == 0)
883 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
887 /*---------------------------------------------------------------------------
888 Flowgraph analysis
889 ---------------------------------------------------------------------------*/
891 /* Remove unreachable blocks and other miscellaneous clean up work. */
893 bool
894 cleanup_tree_cfg (void)
896 bool retval = false;
898 timevar_push (TV_TREE_CLEANUP_CFG);
900 retval = cleanup_control_flow ();
901 retval |= delete_unreachable_blocks ();
903 /* cleanup_forwarder_blocks can redirect edges out of SWITCH_EXPRs,
904 which can get expensive. So we want to enable recording of edge
905 to CASE_LABEL_EXPR mappings around the call to
906 cleanup_forwarder_blocks. */
907 start_recording_case_labels ();
908 retval |= cleanup_forwarder_blocks ();
909 end_recording_case_labels ();
911 #ifdef ENABLE_CHECKING
912 if (retval)
914 gcc_assert (!cleanup_control_flow ());
915 gcc_assert (!delete_unreachable_blocks ());
916 gcc_assert (!cleanup_forwarder_blocks ());
918 #endif
920 /* Merging the blocks creates no new opportunities for the other
921 optimizations, so do it here. */
922 retval |= merge_seq_blocks ();
924 compact_blocks ();
926 #ifdef ENABLE_CHECKING
927 verify_flow_info ();
928 #endif
929 timevar_pop (TV_TREE_CLEANUP_CFG);
930 return retval;
934 /* Cleanup useless labels in basic blocks. This is something we wish
935 to do early because it allows us to group case labels before creating
936 the edges for the CFG, and it speeds up block statement iterators in
937 all passes later on.
938 We only run this pass once; running it more than once is probably not
939 profitable.  */
941 /* A map from basic block index to the leading label of that block. */
942 static tree *label_for_bb;
944 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
945 static void
946 update_eh_label (struct eh_region *region)
948 tree old_label = get_eh_region_tree_label (region);
949 if (old_label)
951 tree new_label;
952 basic_block bb = label_to_block (old_label);
954 /* ??? After optimizing, there may be EH regions with labels
955 that have already been removed from the function body, so
956 there is no basic block for them. */
957 if (! bb)
958 return;
960 new_label = label_for_bb[bb->index];
961 set_eh_region_tree_label (region, new_label);
965 /* Given LABEL return the first label in the same basic block. */
966 static tree
967 main_block_label (tree label)
969 basic_block bb = label_to_block (label);
971 /* label_to_block possibly inserted undefined label into the chain. */
972 if (!label_for_bb[bb->index])
973 label_for_bb[bb->index] = label;
974 return label_for_bb[bb->index];
977 /* Cleanup redundant labels. This is a three-step process:
978 1) Find the leading label for each block.
979 2) Redirect all references to labels to the leading labels.
980 3) Cleanup all useless labels. */
982 void
983 cleanup_dead_labels (void)
985 basic_block bb;
986 label_for_bb = xcalloc (last_basic_block, sizeof (tree));
988 /* Find a suitable label for each block. We use the first user-defined
989 label if there is one, or otherwise just the first label we see. */
990 FOR_EACH_BB (bb)
992 block_stmt_iterator i;
994 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
996 tree label, stmt = bsi_stmt (i);
998 if (TREE_CODE (stmt) != LABEL_EXPR)
999 break;
1001 label = LABEL_EXPR_LABEL (stmt);
1003 /* If we have not yet seen a label for the current block,
1004 remember this one and see if there are more labels. */
1005 if (! label_for_bb[bb->index])
1007 label_for_bb[bb->index] = label;
1008 continue;
1011 /* If we did see a label for the current block already, but it
1012 is an artificially created label, replace it if the current
1013 label is a user defined label. */
1014 if (! DECL_ARTIFICIAL (label)
1015 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
1017 label_for_bb[bb->index] = label;
1018 break;
1023 /* Now redirect all jumps/branches to the selected label.
1024 First do so for each block ending in a control statement. */
1025 FOR_EACH_BB (bb)
1027 tree stmt = last_stmt (bb);
1028 if (!stmt)
1029 continue;
1031 switch (TREE_CODE (stmt))
1033 case COND_EXPR:
1035 tree true_branch, false_branch;
1037 true_branch = COND_EXPR_THEN (stmt);
1038 false_branch = COND_EXPR_ELSE (stmt);
1040 GOTO_DESTINATION (true_branch)
1041 = main_block_label (GOTO_DESTINATION (true_branch));
1042 GOTO_DESTINATION (false_branch)
1043 = main_block_label (GOTO_DESTINATION (false_branch));
1045 break;
1048 case SWITCH_EXPR:
1050 size_t i;
1051 tree vec = SWITCH_LABELS (stmt);
1052 size_t n = TREE_VEC_LENGTH (vec);
1054 /* Replace all destination labels. */
1055 for (i = 0; i < n; ++i)
1057 tree elt = TREE_VEC_ELT (vec, i);
1058 tree label = main_block_label (CASE_LABEL (elt));
1059 CASE_LABEL (elt) = label;
1061 break;
1064 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1065 remove them until after we've created the CFG edges. */
1066 case GOTO_EXPR:
1067 if (! computed_goto_p (stmt))
1069 GOTO_DESTINATION (stmt)
1070 = main_block_label (GOTO_DESTINATION (stmt));
1071 break;
1074 default:
1075 break;
1079 for_each_eh_region (update_eh_label);
1081 /* Finally, purge dead labels. All user-defined labels and labels that
1082 can be the target of non-local gotos are preserved. */
1083 FOR_EACH_BB (bb)
1085 block_stmt_iterator i;
1086 tree label_for_this_bb = label_for_bb[bb->index];
1088 if (! label_for_this_bb)
1089 continue;
1091 for (i = bsi_start (bb); !bsi_end_p (i); )
1093 tree label, stmt = bsi_stmt (i);
1095 if (TREE_CODE (stmt) != LABEL_EXPR)
1096 break;
1098 label = LABEL_EXPR_LABEL (stmt);
1100 if (label == label_for_this_bb
1101 || ! DECL_ARTIFICIAL (label)
1102 || DECL_NONLOCAL (label))
1103 bsi_next (&i);
1104 else
1105 bsi_remove (&i);
1109 free (label_for_bb);
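/* For illustration (a sketch, not literal dump output; the label names
   are made up): if a block begins with

     <D.1234>:;
     L5:;
     <D.1300>:;

   then the user label L5 is chosen as the leading label, every GOTO_EXPR,
   COND_EXPR arm, switch case and EH region label that referred to
   <D.1234> or <D.1300> is redirected to L5, and the two artificial
   labels are deleted.  */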
1112 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1113 and scan the sorted vector of cases. Combine the ones jumping to the
1114 same label.
1115 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1117 void
1118 group_case_labels (void)
1120 basic_block bb;
1122 FOR_EACH_BB (bb)
1124 tree stmt = last_stmt (bb);
1125 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1127 tree labels = SWITCH_LABELS (stmt);
1128 int old_size = TREE_VEC_LENGTH (labels);
1129 int i, j, new_size = old_size;
1130 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1131 tree default_label;
1133 /* The default label is always the last case in a switch
1134 statement after gimplification. */
1135 default_label = CASE_LABEL (default_case);
1137 /* Look for possible opportunities to merge cases.
1138 Ignore the last element of the label vector because it
1139 must be the default case. */
1140 i = 0;
1141 while (i < old_size - 1)
1143 tree base_case, base_label, base_high, type;
1144 base_case = TREE_VEC_ELT (labels, i);
1146 gcc_assert (base_case);
1147 base_label = CASE_LABEL (base_case);
1149 /* Discard cases that have the same destination as the
1150 default case. */
1151 if (base_label == default_label)
1153 TREE_VEC_ELT (labels, i) = NULL_TREE;
1154 i++;
1155 new_size--;
1156 continue;
1159 type = TREE_TYPE (CASE_LOW (base_case));
1160 base_high = CASE_HIGH (base_case) ?
1161 CASE_HIGH (base_case) : CASE_LOW (base_case);
1162 i++;
1163 /* Try to merge case labels. Break out when we reach the end
1164 of the label vector or when we cannot merge the next case
1165 label with the current one. */
1166 while (i < old_size - 1)
1168 tree merge_case = TREE_VEC_ELT (labels, i);
1169 tree merge_label = CASE_LABEL (merge_case);
1170 tree t = int_const_binop (PLUS_EXPR, base_high,
1171 integer_one_node, 1);
1173 /* Merge the cases if they jump to the same place,
1174 and their ranges are consecutive. */
1175 if (merge_label == base_label
1176 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1178 base_high = CASE_HIGH (merge_case) ?
1179 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1180 CASE_HIGH (base_case) = base_high;
1181 TREE_VEC_ELT (labels, i) = NULL_TREE;
1182 new_size--;
1183 i++;
1185 else
1186 break;
1190 /* Compress the case labels in the label vector, and adjust the
1191 length of the vector. */
1192 for (i = 0, j = 0; i < new_size; i++)
1194 while (! TREE_VEC_ELT (labels, j))
1195 j++;
1196 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1198 TREE_VEC_LENGTH (labels) = new_size;
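/* A standalone sketch (not existing GCC code) of the contiguity-merging
   step above, on a plain array of {low, high, target} triples sorted by
   LOW.  It leaves out the discarding of cases that share the default
   label and all TREE_VEC bookkeeping; the struct and function names are
   made up.  */

struct case_range_sketch
{
  int low;
  int high;
  int target;   /* Index of the destination label.  */
};

/* Merge adjacent entries of CASES (N entries, sorted by LOW) that jump
   to the same target and whose ranges are consecutive, compacting the
   array in place.  Returns the new number of entries.  */

static int
merge_case_ranges_sketch (struct case_range_sketch *cases, int n)
{
  int i = 0, j = 0;

  while (i < n)
    {
      struct case_range_sketch base = cases[i++];

      /* Absorb following entries that are contiguous with BASE and share
         its target, mirroring the CASE_HIGH-extending loop above.  */
      while (i < n
             && cases[i].target == base.target
             && cases[i].low == base.high + 1)
        base.high = cases[i++].high;

      cases[j++] = base;
    }

  return j;
}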
1203 /* Checks whether we can merge block B into block A. */
1205 static bool
1206 tree_can_merge_blocks_p (basic_block a, basic_block b)
1208 tree stmt;
1209 block_stmt_iterator bsi;
1211 if (EDGE_COUNT (a->succs) != 1)
1212 return false;
1214 if (EDGE_SUCC (a, 0)->flags & EDGE_ABNORMAL)
1215 return false;
1217 if (EDGE_SUCC (a, 0)->dest != b)
1218 return false;
1220 if (b == EXIT_BLOCK_PTR)
1221 return false;
1223 if (EDGE_COUNT (b->preds) > 1)
1224 return false;
1226 /* If A ends by a statement causing exceptions or something similar, we
1227 cannot merge the blocks. */
1228 stmt = last_stmt (a);
1229 if (stmt && stmt_ends_bb_p (stmt))
1230 return false;
1232 /* Do not allow a block with only a non-local label to be merged. */
1233 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1234 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1235 return false;
1237 /* B must not have any PHI nodes at its start.  Most such degenerate
1238 PHI nodes should have been cleaned up by kill_redundant_phi_nodes. */
1239 if (phi_nodes (b))
1240 return false;
1242 /* Do not remove user labels. */
1243 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1245 stmt = bsi_stmt (bsi);
1246 if (TREE_CODE (stmt) != LABEL_EXPR)
1247 break;
1248 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1249 return false;
1252 return true;
1256 /* Merge block B into block A. */
1258 static void
1259 tree_merge_blocks (basic_block a, basic_block b)
1261 block_stmt_iterator bsi;
1262 tree_stmt_iterator last;
1264 if (dump_file)
1265 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1267 /* Ensure that B follows A. */
1268 move_block_after (b, a);
1270 gcc_assert (EDGE_SUCC (a, 0)->flags & EDGE_FALLTHRU);
1271 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1273 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1274 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1276 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1277 bsi_remove (&bsi);
1278 else
1280 set_bb_for_stmt (bsi_stmt (bsi), a);
1281 bsi_next (&bsi);
1285 /* Merge the chains. */
1286 last = tsi_last (a->stmt_list);
1287 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1288 b->stmt_list = NULL;
1292 /* Walk the function tree removing unnecessary statements.
1294 * Empty statement nodes are removed
1296 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1298 * Unnecessary COND_EXPRs are removed
1300 * Some unnecessary BIND_EXPRs are removed
1302 Clearly more work could be done. The trick is doing the analysis
1303 and removal fast enough to be a net improvement in compile times.
1305 Note that when we remove a control structure such as a COND_EXPR
1306 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1307 to ensure we eliminate all the useless code. */
1309 struct rus_data
1311 tree *last_goto;
1312 bool repeat;
1313 bool may_throw;
1314 bool may_branch;
1315 bool has_label;
1318 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1320 static bool
1321 remove_useless_stmts_warn_notreached (tree stmt)
1323 if (EXPR_HAS_LOCATION (stmt))
1325 location_t loc = EXPR_LOCATION (stmt);
1326 warning ("%Hwill never be executed", &loc);
1327 return true;
1330 switch (TREE_CODE (stmt))
1332 case STATEMENT_LIST:
1334 tree_stmt_iterator i;
1335 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1336 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1337 return true;
1339 break;
1341 case COND_EXPR:
1342 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1343 return true;
1344 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1345 return true;
1346 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1347 return true;
1348 break;
1350 case TRY_FINALLY_EXPR:
1351 case TRY_CATCH_EXPR:
1352 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1353 return true;
1354 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1355 return true;
1356 break;
1358 case CATCH_EXPR:
1359 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1360 case EH_FILTER_EXPR:
1361 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1362 case BIND_EXPR:
1363 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1365 default:
1366 /* Not a live container. */
1367 break;
1370 return false;
1373 static void
1374 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1376 tree then_clause, else_clause, cond;
1377 bool save_has_label, then_has_label, else_has_label;
1379 save_has_label = data->has_label;
1380 data->has_label = false;
1381 data->last_goto = NULL;
1383 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1385 then_has_label = data->has_label;
1386 data->has_label = false;
1387 data->last_goto = NULL;
1389 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1391 else_has_label = data->has_label;
1392 data->has_label = save_has_label | then_has_label | else_has_label;
1394 then_clause = COND_EXPR_THEN (*stmt_p);
1395 else_clause = COND_EXPR_ELSE (*stmt_p);
1396 cond = fold (COND_EXPR_COND (*stmt_p));
1398 /* If neither arm does anything at all, we can remove the whole IF. */
1399 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1401 *stmt_p = build_empty_stmt ();
1402 data->repeat = true;
1405 /* If there are no reachable statements in an arm, then we can
1406 zap the entire conditional. */
1407 else if (integer_nonzerop (cond) && !else_has_label)
1409 if (warn_notreached)
1410 remove_useless_stmts_warn_notreached (else_clause);
1411 *stmt_p = then_clause;
1412 data->repeat = true;
1414 else if (integer_zerop (cond) && !then_has_label)
1416 if (warn_notreached)
1417 remove_useless_stmts_warn_notreached (then_clause);
1418 *stmt_p = else_clause;
1419 data->repeat = true;
1422 /* Check a couple of simple things on then/else with single stmts. */
1423 else
1425 tree then_stmt = expr_only (then_clause);
1426 tree else_stmt = expr_only (else_clause);
1428 /* Notice branches to a common destination. */
1429 if (then_stmt && else_stmt
1430 && TREE_CODE (then_stmt) == GOTO_EXPR
1431 && TREE_CODE (else_stmt) == GOTO_EXPR
1432 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1434 *stmt_p = then_stmt;
1435 data->repeat = true;
1438 /* If the THEN/ELSE clause merely assigns a value to a variable or
1439 parameter which is already known to contain that value, then
1440 remove the useless THEN/ELSE clause. */
1441 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1443 if (else_stmt
1444 && TREE_CODE (else_stmt) == MODIFY_EXPR
1445 && TREE_OPERAND (else_stmt, 0) == cond
1446 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1447 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1449 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1450 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1451 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1452 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1454 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1455 ? then_stmt : else_stmt);
1456 tree *location = (TREE_CODE (cond) == EQ_EXPR
1457 ? &COND_EXPR_THEN (*stmt_p)
1458 : &COND_EXPR_ELSE (*stmt_p));
1460 if (stmt
1461 && TREE_CODE (stmt) == MODIFY_EXPR
1462 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1463 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1464 *location = alloc_stmt_list ();
1468 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1469 would be re-introduced during lowering. */
1470 data->last_goto = NULL;
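/* For illustration (a sketch): the EQ_EXPR case above turns

     if (x == 42)
       x = 42;
     else
       ...

   into a COND_EXPR with an empty THEN arm, since the store is a no-op on
   the path where the condition holds; the NE_EXPR case does the same for
   the ELSE arm.  */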
1474 static void
1475 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1477 bool save_may_branch, save_may_throw;
1478 bool this_may_branch, this_may_throw;
1480 /* Collect may_branch and may_throw information for the body only. */
1481 save_may_branch = data->may_branch;
1482 save_may_throw = data->may_throw;
1483 data->may_branch = false;
1484 data->may_throw = false;
1485 data->last_goto = NULL;
1487 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1489 this_may_branch = data->may_branch;
1490 this_may_throw = data->may_throw;
1491 data->may_branch |= save_may_branch;
1492 data->may_throw |= save_may_throw;
1493 data->last_goto = NULL;
1495 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1497 /* If the body is empty, then we can emit the FINALLY block without
1498 the enclosing TRY_FINALLY_EXPR. */
1499 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1501 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1502 data->repeat = true;
1505 /* If the handler is empty, then we can emit the TRY block without
1506 the enclosing TRY_FINALLY_EXPR. */
1507 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1509 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1510 data->repeat = true;
1513 /* If the body neither throws, nor branches, then we can safely
1514 string the TRY and FINALLY blocks together. */
1515 else if (!this_may_branch && !this_may_throw)
1517 tree stmt = *stmt_p;
1518 *stmt_p = TREE_OPERAND (stmt, 0);
1519 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1520 data->repeat = true;
1525 static void
1526 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1528 bool save_may_throw, this_may_throw;
1529 tree_stmt_iterator i;
1530 tree stmt;
1532 /* Collect may_throw information for the body only. */
1533 save_may_throw = data->may_throw;
1534 data->may_throw = false;
1535 data->last_goto = NULL;
1537 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1539 this_may_throw = data->may_throw;
1540 data->may_throw = save_may_throw;
1542 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1543 if (!this_may_throw)
1545 if (warn_notreached)
1546 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1547 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1548 data->repeat = true;
1549 return;
1552 /* Process the catch clause specially. We may be able to tell that
1553 no exceptions propagate past this point. */
1555 this_may_throw = true;
1556 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1557 stmt = tsi_stmt (i);
1558 data->last_goto = NULL;
1560 switch (TREE_CODE (stmt))
1562 case CATCH_EXPR:
1563 for (; !tsi_end_p (i); tsi_next (&i))
1565 stmt = tsi_stmt (i);
1566 /* If we catch all exceptions, then the body does not
1567 propagate exceptions past this point. */
1568 if (CATCH_TYPES (stmt) == NULL)
1569 this_may_throw = false;
1570 data->last_goto = NULL;
1571 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1573 break;
1575 case EH_FILTER_EXPR:
1576 if (EH_FILTER_MUST_NOT_THROW (stmt))
1577 this_may_throw = false;
1578 else if (EH_FILTER_TYPES (stmt) == NULL)
1579 this_may_throw = false;
1580 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1581 break;
1583 default:
1584 /* Otherwise this is a cleanup. */
1585 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1587 /* If the cleanup is empty, then we can emit the TRY block without
1588 the enclosing TRY_CATCH_EXPR. */
1589 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1591 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1592 data->repeat = true;
1594 break;
1596 data->may_throw |= this_may_throw;
1600 static void
1601 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1603 tree block;
1605 /* First remove anything underneath the BIND_EXPR. */
1606 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1608 /* If the BIND_EXPR has no variables, then we can pull everything
1609 up one level and remove the BIND_EXPR, unless this is the toplevel
1610 BIND_EXPR for the current function or an inlined function.
1612 When this situation occurs we will want to apply this
1613 optimization again. */
1614 block = BIND_EXPR_BLOCK (*stmt_p);
1615 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1616 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1617 && (! block
1618 || ! BLOCK_ABSTRACT_ORIGIN (block)
1619 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1620 != FUNCTION_DECL)))
1622 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1623 data->repeat = true;
1628 static void
1629 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1631 tree dest = GOTO_DESTINATION (*stmt_p);
1633 data->may_branch = true;
1634 data->last_goto = NULL;
1636 /* Record the last goto expr, so that we can delete it if unnecessary. */
1637 if (TREE_CODE (dest) == LABEL_DECL)
1638 data->last_goto = stmt_p;
1642 static void
1643 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1645 tree label = LABEL_EXPR_LABEL (*stmt_p);
1647 data->has_label = true;
1649 /* We do want to jump across non-local label receiver code. */
1650 if (DECL_NONLOCAL (label))
1651 data->last_goto = NULL;
1653 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1655 *data->last_goto = build_empty_stmt ();
1656 data->repeat = true;
1659 /* ??? Add something here to delete unused labels. */
1663 /* If the called function is "const" or "pure", then clear TREE_SIDE_EFFECTS
1664 on the CALL_EXPR.  This allows us to eliminate redundant or useless
1665 calls to "const" functions.
1667 The gimplifier already does the same operation, but we may only notice
1668 that a function is const or pure after its calls have been gimplified,
1669 so we need to update the flags here.  */
1671 static void
1672 update_call_expr_flags (tree call)
1674 tree decl = get_callee_fndecl (call);
1675 if (!decl)
1676 return;
1677 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1678 TREE_SIDE_EFFECTS (call) = 0;
1679 if (TREE_NOTHROW (decl))
1680 TREE_NOTHROW (call) = 1;
1684 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1686 void
1687 notice_special_calls (tree t)
1689 int flags = call_expr_flags (t);
1691 if (flags & ECF_MAY_BE_ALLOCA)
1692 current_function_calls_alloca = true;
1693 if (flags & ECF_RETURNS_TWICE)
1694 current_function_calls_setjmp = true;
1698 /* Clear flags set by notice_special_calls. Used by dead code removal
1699 to update the flags. */
1701 void
1702 clear_special_calls (void)
1704 current_function_calls_alloca = false;
1705 current_function_calls_setjmp = false;
1709 static void
1710 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1712 tree t = *tp, op;
1714 switch (TREE_CODE (t))
1716 case COND_EXPR:
1717 remove_useless_stmts_cond (tp, data);
1718 break;
1720 case TRY_FINALLY_EXPR:
1721 remove_useless_stmts_tf (tp, data);
1722 break;
1724 case TRY_CATCH_EXPR:
1725 remove_useless_stmts_tc (tp, data);
1726 break;
1728 case BIND_EXPR:
1729 remove_useless_stmts_bind (tp, data);
1730 break;
1732 case GOTO_EXPR:
1733 remove_useless_stmts_goto (tp, data);
1734 break;
1736 case LABEL_EXPR:
1737 remove_useless_stmts_label (tp, data);
1738 break;
1740 case RETURN_EXPR:
1741 fold_stmt (tp);
1742 data->last_goto = NULL;
1743 data->may_branch = true;
1744 break;
1746 case CALL_EXPR:
1747 fold_stmt (tp);
1748 data->last_goto = NULL;
1749 notice_special_calls (t);
1750 update_call_expr_flags (t);
1751 if (tree_could_throw_p (t))
1752 data->may_throw = true;
1753 break;
1755 case MODIFY_EXPR:
1756 data->last_goto = NULL;
1757 fold_stmt (tp);
1758 op = get_call_expr_in (t);
1759 if (op)
1761 update_call_expr_flags (op);
1762 notice_special_calls (op);
1764 if (tree_could_throw_p (t))
1765 data->may_throw = true;
1766 break;
1768 case STATEMENT_LIST:
1770 tree_stmt_iterator i = tsi_start (t);
1771 while (!tsi_end_p (i))
1773 t = tsi_stmt (i);
1774 if (IS_EMPTY_STMT (t))
1776 tsi_delink (&i);
1777 continue;
1780 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1782 t = tsi_stmt (i);
1783 if (TREE_CODE (t) == STATEMENT_LIST)
1785 tsi_link_before (&i, t, TSI_SAME_STMT);
1786 tsi_delink (&i);
1788 else
1789 tsi_next (&i);
1792 break;
1793 case ASM_EXPR:
1794 fold_stmt (tp);
1795 data->last_goto = NULL;
1796 break;
1798 default:
1799 data->last_goto = NULL;
1800 break;
1804 static void
1805 remove_useless_stmts (void)
1807 struct rus_data data;
1809 clear_special_calls ();
1813 memset (&data, 0, sizeof (data));
1814 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1816 while (data.repeat);
1820 struct tree_opt_pass pass_remove_useless_stmts =
1822 "useless", /* name */
1823 NULL, /* gate */
1824 remove_useless_stmts, /* execute */
1825 NULL, /* sub */
1826 NULL, /* next */
1827 0, /* static_pass_number */
1828 0, /* tv_id */
1829 PROP_gimple_any, /* properties_required */
1830 0, /* properties_provided */
1831 0, /* properties_destroyed */
1832 0, /* todo_flags_start */
1833 TODO_dump_func, /* todo_flags_finish */
1834 0 /* letter */
1838 /* Remove obviously useless statements in basic block BB. */
1840 static void
1841 cfg_remove_useless_stmts_bb (basic_block bb)
1843 block_stmt_iterator bsi;
1844 tree stmt = NULL_TREE;
1845 tree cond, var = NULL_TREE, val = NULL_TREE;
1846 struct var_ann_d *ann;
1848 /* Check whether we come here from a condition, and if so, get the
1849 condition. */
1850 if (EDGE_COUNT (bb->preds) != 1
1851 || !(EDGE_PRED (bb, 0)->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
1852 return;
1854 cond = COND_EXPR_COND (last_stmt (EDGE_PRED (bb, 0)->src));
1856 if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1858 var = cond;
1859 val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
1860 ? boolean_false_node : boolean_true_node);
1862 else if (TREE_CODE (cond) == TRUTH_NOT_EXPR
1863 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1864 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL))
1866 var = TREE_OPERAND (cond, 0);
1867 val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
1868 ? boolean_true_node : boolean_false_node);
1870 else
1872 if (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE)
1873 cond = invert_truthvalue (cond);
1874 if (TREE_CODE (cond) == EQ_EXPR
1875 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1876 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1877 && (TREE_CODE (TREE_OPERAND (cond, 1)) == VAR_DECL
1878 || TREE_CODE (TREE_OPERAND (cond, 1)) == PARM_DECL
1879 || TREE_CONSTANT (TREE_OPERAND (cond, 1))))
1881 var = TREE_OPERAND (cond, 0);
1882 val = TREE_OPERAND (cond, 1);
1884 else
1885 return;
1888 /* Only work for normal local variables. */
1889 ann = var_ann (var);
1890 if (!ann
1891 || ann->may_aliases
1892 || TREE_ADDRESSABLE (var))
1893 return;
1895 if (! TREE_CONSTANT (val))
1897 ann = var_ann (val);
1898 if (!ann
1899 || ann->may_aliases
1900 || TREE_ADDRESSABLE (val))
1901 return;
1904 /* Ignore floating point variables, since comparison behaves weird for
1905 them. */
1906 if (FLOAT_TYPE_P (TREE_TYPE (var)))
1907 return;
1909 for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
1911 stmt = bsi_stmt (bsi);
1913 /* If the THEN/ELSE clause merely assigns a value to a variable/parameter
1914 which is already known to contain that value, then remove the useless
1915 THEN/ELSE clause. */
1916 if (TREE_CODE (stmt) == MODIFY_EXPR
1917 && TREE_OPERAND (stmt, 0) == var
1918 && operand_equal_p (val, TREE_OPERAND (stmt, 1), 0))
1920 bsi_remove (&bsi);
1921 continue;
1924 /* Invalidate the var if we encounter something that could modify it.
1925 Likewise for the value it was previously set to. Note that we only
1926 consider values that are either a VAR_DECL or PARM_DECL so we
1927 can test for conflict very simply. */
1928 if (TREE_CODE (stmt) == ASM_EXPR
1929 || (TREE_CODE (stmt) == MODIFY_EXPR
1930 && (TREE_OPERAND (stmt, 0) == var
1931 || TREE_OPERAND (stmt, 0) == val)))
1932 return;
1934 bsi_next (&bsi);
1939 /* A CFG-aware version of remove_useless_stmts. */
1941 void
1942 cfg_remove_useless_stmts (void)
1944 basic_block bb;
1946 #ifdef ENABLE_CHECKING
1947 verify_flow_info ();
1948 #endif
1950 FOR_EACH_BB (bb)
1952 cfg_remove_useless_stmts_bb (bb);
1957 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1959 static void
1960 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1962 tree phi;
1964 /* Since this block is no longer reachable, we can just delete all
1965 of its PHI nodes. */
1966 phi = phi_nodes (bb);
1967 while (phi)
1969 tree next = PHI_CHAIN (phi);
1970 remove_phi_node (phi, NULL_TREE, bb);
1971 phi = next;
1974 /* Remove edges to BB's successors. */
1975 while (EDGE_COUNT (bb->succs) > 0)
1976 remove_edge (EDGE_SUCC (bb, 0));
1980 /* Remove statements of basic block BB. */
1982 static void
1983 remove_bb (basic_block bb)
1985 block_stmt_iterator i;
1986 source_locus loc = 0;
1988 if (dump_file)
1990 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1991 if (dump_flags & TDF_DETAILS)
1993 dump_bb (bb, dump_file, 0);
1994 fprintf (dump_file, "\n");
1998 /* Remove all the instructions in the block. */
1999 for (i = bsi_start (bb); !bsi_end_p (i);)
2001 tree stmt = bsi_stmt (i);
2002 if (TREE_CODE (stmt) == LABEL_EXPR
2003 && FORCED_LABEL (LABEL_EXPR_LABEL (stmt)))
2005 basic_block new_bb = bb->prev_bb;
2006 block_stmt_iterator new_bsi = bsi_start (new_bb);
2008 bsi_remove (&i);
2009 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2011 else
2013 release_defs (stmt);
2015 set_bb_for_stmt (stmt, NULL);
2016 bsi_remove (&i);
2019 /* Don't warn for removed gotos. Gotos are often removed due to
2020 jump threading, thus resulting in bogus warnings. Not great,
2021 since this way we lose warnings for gotos in the original
2022 program that are indeed unreachable. */
2023 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2024 #ifdef USE_MAPPED_LOCATION
2025 loc = EXPR_LOCATION (stmt);
2026 #else
2027 loc = EXPR_LOCUS (stmt);
2028 #endif
2031 /* If requested, give a warning that the first statement in the
2032 block is unreachable. We walk statements backwards in the
2033 loop above, so the last statement we process is the first statement
2034 in the block. */
2035 if (warn_notreached && loc)
2036 #ifdef USE_MAPPED_LOCATION
2037 warning ("%Hwill never be executed", &loc);
2038 #else
2039 warning ("%Hwill never be executed", loc);
2040 #endif
2042 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2045 /* Try to remove superfluous control structures. */
2047 static bool
2048 cleanup_control_flow (void)
2050 basic_block bb;
2051 block_stmt_iterator bsi;
2052 bool retval = false;
2053 tree stmt;
2055 FOR_EACH_BB (bb)
2057 bsi = bsi_last (bb);
2059 if (bsi_end_p (bsi))
2060 continue;
2062 stmt = bsi_stmt (bsi);
2063 if (TREE_CODE (stmt) == COND_EXPR
2064 || TREE_CODE (stmt) == SWITCH_EXPR)
2065 retval |= cleanup_control_expr_graph (bb, bsi);
2067 return retval;
2071 /* Disconnect an unreachable block in the control expression starting
2072 at block BB. */
2074 static bool
2075 cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
2077 edge taken_edge;
2078 bool retval = false;
2079 tree expr = bsi_stmt (bsi), val;
2081 if (EDGE_COUNT (bb->succs) > 1)
2083 edge e;
2084 edge_iterator ei;
2086 switch (TREE_CODE (expr))
2088 case COND_EXPR:
2089 val = COND_EXPR_COND (expr);
2090 break;
2092 case SWITCH_EXPR:
2093 val = SWITCH_COND (expr);
2094 if (TREE_CODE (val) != INTEGER_CST)
2095 return false;
2096 break;
2098 default:
2099 gcc_unreachable ();
2102 taken_edge = find_taken_edge (bb, val);
2103 if (!taken_edge)
2104 return false;
2106 /* Remove all the edges except the one that is always executed. */
2107 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2109 if (e != taken_edge)
2111 taken_edge->probability += e->probability;
2112 taken_edge->count += e->count;
2113 remove_edge (e);
2114 retval = true;
2116 else
2117 ei_next (&ei);
2119 if (taken_edge->probability > REG_BR_PROB_BASE)
2120 taken_edge->probability = REG_BR_PROB_BASE;
2122 else
2123 taken_edge = EDGE_SUCC (bb, 0);
2125 bsi_remove (&bsi);
2126 taken_edge->flags = EDGE_FALLTHRU;
2128 /* We removed some paths from the cfg. */
2129 free_dominance_info (CDI_DOMINATORS);
2131 return retval;
2135 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2136 predicate VAL, return the edge that will be taken out of the block.
2137 If VAL does not match a unique edge, NULL is returned. */
2139 edge
2140 find_taken_edge (basic_block bb, tree val)
2142 tree stmt;
2144 stmt = last_stmt (bb);
2146 gcc_assert (stmt);
2147 gcc_assert (is_ctrl_stmt (stmt));
2148 gcc_assert (val);
2150 /* If VAL is a predicate of the form N RELOP N, where N is an
2151 SSA_NAME, we can usually determine its truth value. */
2152 if (COMPARISON_CLASS_P (val))
2153 val = fold (val);
2155 /* If VAL is not a constant, we can't determine which edge might
2156 be taken. */
2157 if (!really_constant_p (val))
2158 return NULL;
2160 if (TREE_CODE (stmt) == COND_EXPR)
2161 return find_taken_edge_cond_expr (bb, val);
2163 if (TREE_CODE (stmt) == SWITCH_EXPR)
2164 return find_taken_edge_switch_expr (bb, val);
2166 gcc_unreachable ();
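/* Example (sketch, not from the original source): callers typically pass
   the folded predicate of the block's last statement, e.g.

     edge e = find_taken_edge (bb, integer_one_node);

   which yields the EDGE_TRUE_VALUE successor of a COND_EXPR block; a VAL
   that does not fold to a constant makes the function return NULL.  */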
2170 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2171 statement, determine which of the two edges will be taken out of the
2172 block. Return NULL if either edge may be taken. */
2174 static edge
2175 find_taken_edge_cond_expr (basic_block bb, tree val)
2177 edge true_edge, false_edge;
2179 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2181 /* Try to determine which branch of the if () will be taken.
2182 If VAL is a constant but it can't be reduced to a 0 or a 1, then
2183 we don't really know which edge will be taken at runtime. This
2184 may happen when comparing addresses (e.g., if (&var1 == 4)). */
2185 if (integer_nonzerop (val))
2186 return true_edge;
2187 else if (integer_zerop (val))
2188 return false_edge;
2189 else
2190 return NULL;
2194 /* Given a constant value VAL and the entry block BB to a SWITCH_EXPR
2195 statement, determine which edge will be taken out of the block. Return
2196 NULL if any edge may be taken. */
2198 static edge
2199 find_taken_edge_switch_expr (basic_block bb, tree val)
2201 tree switch_expr, taken_case;
2202 basic_block dest_bb;
2203 edge e;
2205 if (TREE_CODE (val) != INTEGER_CST)
2206 return NULL;
2208 switch_expr = last_stmt (bb);
2209 taken_case = find_case_label_for_value (switch_expr, val);
2210 dest_bb = label_to_block (CASE_LABEL (taken_case));
2212 e = find_edge (bb, dest_bb);
2213 gcc_assert (e);
2214 return e;
2218 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2219 We can make optimal use here of the fact that the case labels are
2220 sorted: We can do a binary search for a case matching VAL. */
2222 static tree
2223 find_case_label_for_value (tree switch_expr, tree val)
2225 tree vec = SWITCH_LABELS (switch_expr);
2226 size_t low, high, n = TREE_VEC_LENGTH (vec);
2227 tree default_case = TREE_VEC_ELT (vec, n - 1);
2229 for (low = -1, high = n - 1; high - low > 1; )
2231 size_t i = (high + low) / 2;
2232 tree t = TREE_VEC_ELT (vec, i);
2233 int cmp;
2235 /* Cache the result of comparing CASE_LOW and val. */
2236 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2238 if (cmp > 0)
2239 high = i;
2240 else
2241 low = i;
2243 if (CASE_HIGH (t) == NULL)
2245 /* A single-valued case label. */
2246 if (cmp == 0)
2247 return t;
2249 else
2251 /* A case range. We can only handle integer ranges. */
2252 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2253 return t;
2257 return default_case;
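/* Worked example (sketch, assuming a hypothetical case vector): with the
   sorted labels {case 1, case 5 ... 8, default} a VAL of 6 makes the
   binary search stop at the range label and return it, while a VAL of 3
   matches no label and the trailing default case is returned.  */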
2261 /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
2262 those alternatives are equal in each of the PHI nodes, then return
2263 true, else return false. */
2265 static bool
2266 phi_alternatives_equal (basic_block dest, edge e1, edge e2)
2268 int n1 = e1->dest_idx;
2269 int n2 = e2->dest_idx;
2270 tree phi;
2272 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
2274 tree val1 = PHI_ARG_DEF (phi, n1);
2275 tree val2 = PHI_ARG_DEF (phi, n2);
2277 gcc_assert (val1 != NULL_TREE);
2278 gcc_assert (val2 != NULL_TREE);
2280 if (!operand_equal_for_phi_arg_p (val1, val2))
2281 return false;
2284 return true;
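/* Sketch (added commentary): for a PHI node such as
   "x_3 = PHI <x_1 (e1), x_1 (e2)>" the two alternatives compare equal and
   the function returns true; if one edge supplied x_1 and the other x_2,
   it would return false and the caller must keep the edges distinct.  */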
2288 /*---------------------------------------------------------------------------
2289 Debugging functions
2290 ---------------------------------------------------------------------------*/
2292 /* Dump tree-specific information of block BB to file OUTF. */
2294 void
2295 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2297 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2301 /* Dump a basic block on stderr. */
2303 void
2304 debug_tree_bb (basic_block bb)
2306 dump_bb (bb, stderr, 0);
2310 /* Dump basic block with index N on stderr. */
2312 basic_block
2313 debug_tree_bb_n (int n)
2315 debug_tree_bb (BASIC_BLOCK (n));
2316 return BASIC_BLOCK (n);
2320 /* Dump the CFG on stderr.
2322 FLAGS are the same as those used by the tree dumping functions
2323 (see TDF_* in tree.h). */
2325 void
2326 debug_tree_cfg (int flags)
2328 dump_tree_cfg (stderr, flags);
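/* Usage sketch (illustrative): these debug_* entry points are meant to be
   called by hand from a debugger, e.g.

     (gdb) call debug_tree_cfg (TDF_DETAILS | TDF_STATS)

   which dumps the CFG of the function currently being compiled to
   stderr.  */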
2332 /* Dump the program showing basic block boundaries on the given FILE.
2334 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2335 tree.h). */
2337 void
2338 dump_tree_cfg (FILE *file, int flags)
2340 if (flags & TDF_DETAILS)
2342 const char *funcname
2343 = lang_hooks.decl_printable_name (current_function_decl, 2);
2345 fputc ('\n', file);
2346 fprintf (file, ";; Function %s\n\n", funcname);
2347 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2348 n_basic_blocks, n_edges, last_basic_block);
2350 brief_dump_cfg (file);
2351 fprintf (file, "\n");
2354 if (flags & TDF_STATS)
2355 dump_cfg_stats (file);
2357 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2361 /* Dump CFG statistics on FILE. */
2363 void
2364 dump_cfg_stats (FILE *file)
2366 static long max_num_merged_labels = 0;
2367 unsigned long size, total = 0;
2368 int n_edges;
2369 basic_block bb;
2370 const char * const fmt_str = "%-30s%-13s%12s\n";
2371 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2372 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2373 const char *funcname
2374 = lang_hooks.decl_printable_name (current_function_decl, 2);
2377 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2379 fprintf (file, "---------------------------------------------------------\n");
2380 fprintf (file, fmt_str, "", " Number of ", "Memory");
2381 fprintf (file, fmt_str, "", " instances ", "used ");
2382 fprintf (file, "---------------------------------------------------------\n");
2384 size = n_basic_blocks * sizeof (struct basic_block_def);
2385 total += size;
2386 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2387 SCALE (size), LABEL (size));
2389 n_edges = 0;
2390 FOR_EACH_BB (bb)
2391 n_edges += EDGE_COUNT (bb->succs);
2392 size = n_edges * sizeof (struct edge_def);
2393 total += size;
2394 fprintf (file, fmt_str_1, "Edges", n_edges, SCALE (size), LABEL (size));
2396 size = n_basic_blocks * sizeof (struct bb_ann_d);
2397 total += size;
2398 fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks,
2399 SCALE (size), LABEL (size));
2401 fprintf (file, "---------------------------------------------------------\n");
2402 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2403 LABEL (total));
2404 fprintf (file, "---------------------------------------------------------\n");
2405 fprintf (file, "\n");
2407 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2408 max_num_merged_labels = cfg_stats.num_merged_labels;
2410 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2411 cfg_stats.num_merged_labels, max_num_merged_labels);
2413 fprintf (file, "\n");
2417 /* Dump CFG statistics on stderr. Keep extern so that it's always
2418 linked in the final executable. */
2420 void
2421 debug_cfg_stats (void)
2423 dump_cfg_stats (stderr);
2427 /* Dump the flowgraph to a .vcg FILE. */
2429 static void
2430 tree_cfg2vcg (FILE *file)
2432 edge e;
2433 edge_iterator ei;
2434 basic_block bb;
2435 const char *funcname
2436 = lang_hooks.decl_printable_name (current_function_decl, 2);
2438 /* Write the file header. */
2439 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2440 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2441 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2443 /* Write blocks and edges. */
2444 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2446 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2447 e->dest->index);
2449 if (e->flags & EDGE_FAKE)
2450 fprintf (file, " linestyle: dotted priority: 10");
2451 else
2452 fprintf (file, " linestyle: solid priority: 100");
2454 fprintf (file, " }\n");
2456 fputc ('\n', file);
2458 FOR_EACH_BB (bb)
2460 enum tree_code head_code, end_code;
2461 const char *head_name, *end_name;
2462 int head_line = 0;
2463 int end_line = 0;
2464 tree first = first_stmt (bb);
2465 tree last = last_stmt (bb);
2467 if (first)
2469 head_code = TREE_CODE (first);
2470 head_name = tree_code_name[head_code];
2471 head_line = get_lineno (first);
2473 else
2474 head_name = "no-statement";
2476 if (last)
2478 end_code = TREE_CODE (last);
2479 end_name = tree_code_name[end_code];
2480 end_line = get_lineno (last);
2482 else
2483 end_name = "no-statement";
2485 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2486 bb->index, bb->index, head_name, head_line, end_name,
2487 end_line);
2489 FOR_EACH_EDGE (e, ei, bb->succs)
2491 if (e->dest == EXIT_BLOCK_PTR)
2492 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2493 else
2494 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2496 if (e->flags & EDGE_FAKE)
2497 fprintf (file, " priority: 10 linestyle: dotted");
2498 else
2499 fprintf (file, " priority: 100 linestyle: solid");
2501 fprintf (file, " }\n");
2504 if (bb->next_bb != EXIT_BLOCK_PTR)
2505 fputc ('\n', file);
2508 fputs ("}\n\n", file);
2513 /*---------------------------------------------------------------------------
2514 Miscellaneous helpers
2515 ---------------------------------------------------------------------------*/
2517 /* Return true if T represents a stmt that always transfers control. */
2519 bool
2520 is_ctrl_stmt (tree t)
2522 return (TREE_CODE (t) == COND_EXPR
2523 || TREE_CODE (t) == SWITCH_EXPR
2524 || TREE_CODE (t) == GOTO_EXPR
2525 || TREE_CODE (t) == RETURN_EXPR
2526 || TREE_CODE (t) == RESX_EXPR);
2530 /* Return true if T is a statement that may alter the flow of control
2531 (e.g., a call to a non-returning function). */
2533 bool
2534 is_ctrl_altering_stmt (tree t)
2536 tree call;
2538 gcc_assert (t);
2539 call = get_call_expr_in (t);
2540 if (call)
2542 /* A non-pure/const CALL_EXPR alters flow control if the current
2543 function has nonlocal labels. */
2544 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2545 return true;
2547 /* A CALL_EXPR also alters control flow if it does not return. */
2548 if (call_expr_flags (call) & ECF_NORETURN)
2549 return true;
2552 /* If a statement can throw, it alters control flow. */
2553 return tree_can_throw_internal (t);
2557 /* Return true if T is a computed goto. */
2559 bool
2560 computed_goto_p (tree t)
2562 return (TREE_CODE (t) == GOTO_EXPR
2563 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2567 /* Checks whether EXPR is a simple local goto. */
2569 bool
2570 simple_goto_p (tree expr)
2572 return (TREE_CODE (expr) == GOTO_EXPR
2573 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
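/* Example (sketch): "goto *fn_ptr;" has a GOTO_DESTINATION that is not a
   LABEL_DECL, so computed_goto_p is true for it; a plain "goto L1;"
   satisfies simple_goto_p instead.  Exactly one of the two predicates
   holds for any given GOTO_EXPR.  */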
2577 /* Return true if T should start a new basic block. PREV_T is the
2578 statement preceding T. It is used when T is a label or a case label.
2579 Labels should only start a new basic block if their previous statement
2580 wasn't a label. Otherwise, a sequence of labels would generate
2581 unnecessary basic blocks that only contain a single label. */
2583 static inline bool
2584 stmt_starts_bb_p (tree t, tree prev_t)
2586 enum tree_code code;
2588 if (t == NULL_TREE)
2589 return false;
2591 /* LABEL_EXPRs start a new basic block only if the preceding
2592 statement wasn't a label of the same type. This prevents the
2593 creation of consecutive blocks that have nothing but a single
2594 label. */
2595 code = TREE_CODE (t);
2596 if (code == LABEL_EXPR)
2598 /* Nonlocal and computed GOTO targets always start a new block. */
2599 if (code == LABEL_EXPR
2600 && (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2601 || FORCED_LABEL (LABEL_EXPR_LABEL (t))))
2602 return true;
2604 if (prev_t && TREE_CODE (prev_t) == code)
2606 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2607 return true;
2609 cfg_stats.num_merged_labels++;
2610 return false;
2612 else
2613 return true;
2616 return false;
2620 /* Return true if T should end a basic block. */
2622 bool
2623 stmt_ends_bb_p (tree t)
2625 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
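/* Sketch (added commentary): besides the explicit control statements
   listed in is_ctrl_stmt, this also returns true for statements that may
   alter flow indirectly, e.g. a call to a noreturn function such as
   abort (), or any statement that can throw internally.  */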
2629 /* Add gotos that used to be represented implicitly in the CFG. */
2631 void
2632 disband_implicit_edges (void)
2634 basic_block bb;
2635 block_stmt_iterator last;
2636 edge e;
2637 edge_iterator ei;
2638 tree stmt, label;
2640 FOR_EACH_BB (bb)
2642 last = bsi_last (bb);
2643 stmt = last_stmt (bb);
2645 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2647 /* Remove superfluous gotos from COND_EXPR branches. Moved
2648 from cfg_remove_useless_stmts here since it violates the
2649 invariants for tree--cfg correspondence and thus fits better
2650 here where we do it anyway. */
2651 e = find_edge (bb, bb->next_bb);
2652 if (e)
2654 if (e->flags & EDGE_TRUE_VALUE)
2655 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2656 else if (e->flags & EDGE_FALSE_VALUE)
2657 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2658 else
2659 gcc_unreachable ();
2660 e->flags |= EDGE_FALLTHRU;
2663 continue;
2666 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2668 /* Remove the RETURN_EXPR if we may fall through to the exit
2669 instead. */
2670 gcc_assert (EDGE_COUNT (bb->succs) == 1);
2671 gcc_assert (EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR);
2673 if (bb->next_bb == EXIT_BLOCK_PTR
2674 && !TREE_OPERAND (stmt, 0))
2676 bsi_remove (&last);
2677 EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
2679 continue;
2682 /* There can be no fallthru edge if the last statement is a control
2683 one. */
2684 if (stmt && is_ctrl_stmt (stmt))
2685 continue;
2687 /* Find a fallthru edge and emit the goto if necessary. */
2688 FOR_EACH_EDGE (e, ei, bb->succs)
2689 if (e->flags & EDGE_FALLTHRU)
2690 break;
2692 if (!e || e->dest == bb->next_bb)
2693 continue;
2695 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2696 label = tree_block_label (e->dest);
2698 stmt = build1 (GOTO_EXPR, void_type_node, label);
2699 #ifdef USE_MAPPED_LOCATION
2700 SET_EXPR_LOCATION (stmt, e->goto_locus);
2701 #else
2702 SET_EXPR_LOCUS (stmt, e->goto_locus);
2703 #endif
2704 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2705 e->flags &= ~EDGE_FALLTHRU;
2709 /* Remove block annotations and other data structures. */
2711 void
2712 delete_tree_cfg_annotations (void)
2714 basic_block bb;
2715 if (n_basic_blocks > 0)
2716 free_blocks_annotations ();
2718 label_to_block_map = NULL;
2719 free_rbi_pool ();
2720 FOR_EACH_BB (bb)
2721 bb->rbi = NULL;
2725 /* Return the first statement in basic block BB. */
2727 tree
2728 first_stmt (basic_block bb)
2730 block_stmt_iterator i = bsi_start (bb);
2731 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2735 /* Return the last statement in basic block BB. */
2737 tree
2738 last_stmt (basic_block bb)
2740 block_stmt_iterator b = bsi_last (bb);
2741 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2745 /* Return a pointer to the last statement in block BB. */
2747 tree *
2748 last_stmt_ptr (basic_block bb)
2750 block_stmt_iterator last = bsi_last (bb);
2751 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2755 /* Return the last statement of an otherwise empty block. Return NULL
2756 if the block is totally empty, or if it contains more than one
2757 statement. */
2759 tree
2760 last_and_only_stmt (basic_block bb)
2762 block_stmt_iterator i = bsi_last (bb);
2763 tree last, prev;
2765 if (bsi_end_p (i))
2766 return NULL_TREE;
2768 last = bsi_stmt (i);
2769 bsi_prev (&i);
2770 if (bsi_end_p (i))
2771 return last;
2773 /* Empty statements should no longer appear in the instruction stream.
2774 Everything that might have appeared before should be deleted by
2775 remove_useless_stmts, and the optimizers should just bsi_remove
2776 instead of smashing with build_empty_stmt.
2778 Thus the only thing that should appear here in a block containing
2779 one executable statement is a label. */
2780 prev = bsi_stmt (i);
2781 if (TREE_CODE (prev) == LABEL_EXPR)
2782 return last;
2783 else
2784 return NULL_TREE;
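/* Sketch: for a block containing only "L1:;  return x_1;" this returns
   the RETURN_EXPR (the leading label is ignored), whereas a block with
   two executable statements returns NULL_TREE.  */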
2788 /* Mark BB as the basic block holding statement T. */
2790 void
2791 set_bb_for_stmt (tree t, basic_block bb)
2793 if (TREE_CODE (t) == PHI_NODE)
2794 PHI_BB (t) = bb;
2795 else if (TREE_CODE (t) == STATEMENT_LIST)
2797 tree_stmt_iterator i;
2798 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2799 set_bb_for_stmt (tsi_stmt (i), bb);
2801 else
2803 stmt_ann_t ann = get_stmt_ann (t);
2804 ann->bb = bb;
2806 /* If the statement is a label, add the label to block-to-labels map
2807 so that we can speed up edge creation for GOTO_EXPRs. */
2808 if (TREE_CODE (t) == LABEL_EXPR)
2810 int uid;
2812 t = LABEL_EXPR_LABEL (t);
2813 uid = LABEL_DECL_UID (t);
2814 if (uid == -1)
2816 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2817 if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
2818 VARRAY_GROW (label_to_block_map, 3 * uid / 2);
2820 else
2821 /* We're moving an existing label. Make sure that we've
2822 removed it from the old block. */
2823 gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
2824 VARRAY_BB (label_to_block_map, uid) = bb;
2831 /* Finds the iterator for STMT. */
2831 extern block_stmt_iterator
2832 bsi_for_stmt (tree stmt)
2834 block_stmt_iterator bsi;
2836 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2837 if (bsi_stmt (bsi) == stmt)
2838 return bsi;
2840 gcc_unreachable ();
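/* Usage sketch (illustrative, not part of the original source): a caller
   that only has a statement in hand can recover its iterator and then
   edit in place, e.g.

     block_stmt_iterator bsi = bsi_for_stmt (old_stmt);
     bsi_replace (&bsi, new_stmt, true);

   OLD_STMT must already be linked into a basic block, and NEW_STMT is a
   hypothetical replacement statement built by the caller.  */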
2843 /* Insert statement (or statement list) T before the statement
2844 pointed-to by iterator I. M specifies how to update iterator I
2845 after insertion (see enum bsi_iterator_update). */
2847 void
2848 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2850 set_bb_for_stmt (t, i->bb);
2851 tsi_link_before (&i->tsi, t, m);
2852 modify_stmt (t);
2856 /* Insert statement (or statement list) T after the statement
2857 pointed-to by iterator I. M specifies how to update iterator I
2858 after insertion (see enum bsi_iterator_update). */
2860 void
2861 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2863 set_bb_for_stmt (t, i->bb);
2864 tsi_link_after (&i->tsi, t, m);
2865 modify_stmt (t);
2869 /* Remove the statement pointed to by iterator I. The iterator is updated
2870 to the next statement. */
2872 void
2873 bsi_remove (block_stmt_iterator *i)
2875 tree t = bsi_stmt (*i);
2876 set_bb_for_stmt (t, NULL);
2877 tsi_delink (&i->tsi);
2881 /* Move the statement at FROM so it comes right after the statement at TO. */
2883 void
2884 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2886 tree stmt = bsi_stmt (*from);
2887 bsi_remove (from);
2888 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2892 /* Move the statement at FROM so it comes right before the statement at TO. */
2894 void
2895 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2897 tree stmt = bsi_stmt (*from);
2898 bsi_remove (from);
2899 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2903 /* Move the statement at FROM to the end of basic block BB. */
2905 void
2906 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2908 block_stmt_iterator last = bsi_last (bb);
2910 /* Have to check bsi_end_p because it could be an empty block. */
2911 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2912 bsi_move_before (from, &last);
2913 else
2914 bsi_move_after (from, &last);
2918 /* Replace the contents of the statement pointed to by iterator BSI
2919 with STMT. If PRESERVE_EH_INFO is true, the exception handling
2920 information of the original statement is preserved. */
2922 void
2923 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
2925 int eh_region;
2926 tree orig_stmt = bsi_stmt (*bsi);
2928 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2929 set_bb_for_stmt (stmt, bsi->bb);
2931 /* Preserve EH region information from the original statement, if
2932 requested by the caller. */
2933 if (preserve_eh_info)
2935 eh_region = lookup_stmt_eh_region (orig_stmt);
2936 if (eh_region >= 0)
2937 add_stmt_to_eh_region (stmt, eh_region);
2940 *bsi_stmt_ptr (*bsi) = stmt;
2941 modify_stmt (stmt);
2945 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2946 is made to place the statement in an existing basic block, but
2947 sometimes that isn't possible. When it isn't possible, the edge is
2948 split and the statement is added to the new block.
2950 In all cases, the returned *BSI points to the correct location. The
2951 return value is true if insertion should be done after the location,
2952 or false if it should be done before the location. If a new basic block
2953 has to be created, it is stored in *NEW_BB. */
2955 static bool
2956 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2957 basic_block *new_bb)
2959 basic_block dest, src;
2960 tree tmp;
2962 dest = e->dest;
2963 restart:
2965 /* If the destination has one predecessor which has no PHI nodes,
2966 insert there. Except for the exit block.
2968 The requirement for no PHI nodes could be relaxed. Basically we
2969 would have to examine the PHIs to prove that none of them used
2970 the value set by the statement we want to insert on E. That
2971 hardly seems worth the effort. */
2972 if (EDGE_COUNT (dest->preds) == 1
2973 && ! phi_nodes (dest)
2974 && dest != EXIT_BLOCK_PTR)
2976 *bsi = bsi_start (dest);
2977 if (bsi_end_p (*bsi))
2978 return true;
2980 /* Make sure we insert after any leading labels. */
2981 tmp = bsi_stmt (*bsi);
2982 while (TREE_CODE (tmp) == LABEL_EXPR)
2984 bsi_next (bsi);
2985 if (bsi_end_p (*bsi))
2986 break;
2987 tmp = bsi_stmt (*bsi);
2990 if (bsi_end_p (*bsi))
2992 *bsi = bsi_last (dest);
2993 return true;
2995 else
2996 return false;
2999 /* If the source has one successor, the edge is not abnormal and
3000 the last statement does not end a basic block, insert there.
3001 Except for the entry block. */
3002 src = e->src;
3003 if ((e->flags & EDGE_ABNORMAL) == 0
3004 && EDGE_COUNT (src->succs) == 1
3005 && src != ENTRY_BLOCK_PTR)
3007 *bsi = bsi_last (src);
3008 if (bsi_end_p (*bsi))
3009 return true;
3011 tmp = bsi_stmt (*bsi);
3012 if (!stmt_ends_bb_p (tmp))
3013 return true;
3015 /* Insert code just before returning the value. We may need to decompose
3016 the return in case it contains a non-trivial operand. */
3017 if (TREE_CODE (tmp) == RETURN_EXPR)
3019 tree op = TREE_OPERAND (tmp, 0);
3020 if (!is_gimple_val (op))
3022 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3023 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3024 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3026 bsi_prev (bsi);
3027 return true;
3031 /* Otherwise, create a new basic block, and split this edge. */
3032 dest = split_edge (e);
3033 if (new_bb)
3034 *new_bb = dest;
3035 e = EDGE_PRED (dest, 0);
3036 goto restart;
3040 /* This routine will commit all pending edge insertions, creating any new
3041 basic blocks which are necessary. */
3043 void
3044 bsi_commit_edge_inserts (void)
3046 basic_block bb;
3047 edge e;
3048 edge_iterator ei;
3050 bsi_commit_one_edge_insert (EDGE_SUCC (ENTRY_BLOCK_PTR, 0), NULL);
3052 FOR_EACH_BB (bb)
3053 FOR_EACH_EDGE (e, ei, bb->succs)
3054 bsi_commit_one_edge_insert (e, NULL);
3058 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3059 to this block, otherwise set it to NULL. */
3061 void
3062 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3064 if (new_bb)
3065 *new_bb = NULL;
3066 if (PENDING_STMT (e))
3068 block_stmt_iterator bsi;
3069 tree stmt = PENDING_STMT (e);
3071 PENDING_STMT (e) = NULL_TREE;
3073 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3074 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3075 else
3076 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3081 /* Add STMT to the pending list of edge E. No actual insertion is
3082 made until a call to bsi_commit_edge_inserts () is made. */
3084 void
3085 bsi_insert_on_edge (edge e, tree stmt)
3087 append_to_statement_list (stmt, &PENDING_STMT (e));
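/* Usage sketch (illustrative): a pass that wants to place a statement on
   every outgoing edge of BB can queue the insertions and commit them in
   one batch, letting the commit split any edges that need it:

     FOR_EACH_EDGE (e, ei, bb->succs)
       bsi_insert_on_edge (e, unshare_expr (stmt));
     bsi_commit_edge_inserts ();

   STMT stands for a GIMPLE statement built by the caller; unshare_expr
   keeps the queued copies from sharing tree nodes.  */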
3090 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3091 block has to be created, it is returned. */
3093 basic_block
3094 bsi_insert_on_edge_immediate (edge e, tree stmt)
3096 block_stmt_iterator bsi;
3097 basic_block new_bb = NULL;
3099 gcc_assert (!PENDING_STMT (e));
3101 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3102 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3103 else
3104 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3106 return new_bb;
3109 /*---------------------------------------------------------------------------
3110 Tree specific functions for CFG manipulation
3111 ---------------------------------------------------------------------------*/
3113 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3115 static void
3116 reinstall_phi_args (edge new_edge, edge old_edge)
3118 tree var, phi;
3120 if (!PENDING_STMT (old_edge))
3121 return;
3123 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3124 var && phi;
3125 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3127 tree result = TREE_PURPOSE (var);
3128 tree arg = TREE_VALUE (var);
3130 gcc_assert (result == PHI_RESULT (phi));
3132 add_phi_arg (phi, arg, new_edge);
3135 PENDING_STMT (old_edge) = NULL;
3138 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3139 Abort on abnormal edges. */
3141 static basic_block
3142 tree_split_edge (edge edge_in)
3144 basic_block new_bb, after_bb, dest, src;
3145 edge new_edge, e;
3147 /* Abnormal edges cannot be split. */
3148 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3150 src = edge_in->src;
3151 dest = edge_in->dest;
3153 /* Place the new block in the block list. Try to keep the new block
3154 near its "logical" location. This is of most help to humans looking
3155 at debugging dumps. */
3156 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3157 after_bb = edge_in->src;
3158 else
3159 after_bb = dest->prev_bb;
3161 new_bb = create_empty_bb (after_bb);
3162 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3163 new_bb->count = edge_in->count;
3164 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3165 new_edge->probability = REG_BR_PROB_BASE;
3166 new_edge->count = edge_in->count;
3168 e = redirect_edge_and_branch (edge_in, new_bb);
3169 gcc_assert (e);
3170 reinstall_phi_args (new_edge, e);
3172 return new_bb;
3176 /* Return true when BB has label LABEL in it. */
3178 static bool
3179 has_label_p (basic_block bb, tree label)
3181 block_stmt_iterator bsi;
3183 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3185 tree stmt = bsi_stmt (bsi);
3187 if (TREE_CODE (stmt) != LABEL_EXPR)
3188 return false;
3189 if (LABEL_EXPR_LABEL (stmt) == label)
3190 return true;
3192 return false;
3196 /* Callback for walk_tree, check that all elements with address taken are
3197 properly noticed as such. */
3199 static tree
3200 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3202 tree t = *tp, x;
3204 if (TYPE_P (t))
3205 *walk_subtrees = 0;
3207 /* Check operand N for being valid GIMPLE and give error MSG if not.
3208 We check for constants explicitly since they are not considered
3209 gimple invariants if they overflowed. */
3210 #define CHECK_OP(N, MSG) \
3211 do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N)) \
3212 && !is_gimple_val (TREE_OPERAND (t, N))) \
3213 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3215 switch (TREE_CODE (t))
3217 case SSA_NAME:
3218 if (SSA_NAME_IN_FREE_LIST (t))
3220 error ("SSA name in freelist but still referenced");
3221 return *tp;
3223 break;
3225 case MODIFY_EXPR:
3226 x = TREE_OPERAND (t, 0);
3227 if (TREE_CODE (x) == BIT_FIELD_REF
3228 && is_gimple_reg (TREE_OPERAND (x, 0)))
3230 error ("GIMPLE register modified with BIT_FIELD_REF");
3231 return t;
3233 break;
3235 case ADDR_EXPR:
3236 /* Skip any references (they will be checked when we recurse down the
3237 tree) and ensure that any variable used as a prefix is marked
3238 addressable. */
3239 for (x = TREE_OPERAND (t, 0);
3240 handled_component_p (x);
3241 x = TREE_OPERAND (x, 0))
3244 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3245 return NULL;
3246 if (!TREE_ADDRESSABLE (x))
3248 error ("address taken, but ADDRESSABLE bit not set");
3249 return x;
3251 break;
3253 case COND_EXPR:
3254 x = COND_EXPR_COND (t);
3255 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3257 error ("non-boolean used in condition");
3258 return x;
3260 break;
3262 case NOP_EXPR:
3263 case CONVERT_EXPR:
3264 case FIX_TRUNC_EXPR:
3265 case FIX_CEIL_EXPR:
3266 case FIX_FLOOR_EXPR:
3267 case FIX_ROUND_EXPR:
3268 case FLOAT_EXPR:
3269 case NEGATE_EXPR:
3270 case ABS_EXPR:
3271 case BIT_NOT_EXPR:
3272 case NON_LVALUE_EXPR:
3273 case TRUTH_NOT_EXPR:
3274 CHECK_OP (0, "Invalid operand to unary operator");
3275 break;
3277 case REALPART_EXPR:
3278 case IMAGPART_EXPR:
3279 case COMPONENT_REF:
3280 case ARRAY_REF:
3281 case ARRAY_RANGE_REF:
3282 case BIT_FIELD_REF:
3283 case VIEW_CONVERT_EXPR:
3284 /* We have a nest of references. Verify that each of the operands
3285 that determine where to reference is either a constant or a variable,
3286 verify that the base is valid, and then show we've already checked
3287 the subtrees. */
3288 while (handled_component_p (t))
3290 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3291 CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
3292 else if (TREE_CODE (t) == ARRAY_REF
3293 || TREE_CODE (t) == ARRAY_RANGE_REF)
3295 CHECK_OP (1, "Invalid array index.");
3296 if (TREE_OPERAND (t, 2))
3297 CHECK_OP (2, "Invalid array lower bound.");
3298 if (TREE_OPERAND (t, 3))
3299 CHECK_OP (3, "Invalid array stride.");
3301 else if (TREE_CODE (t) == BIT_FIELD_REF)
3303 CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
3304 CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
3307 t = TREE_OPERAND (t, 0);
3310 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3312 error ("Invalid reference prefix.");
3313 return t;
3315 *walk_subtrees = 0;
3316 break;
3318 case LT_EXPR:
3319 case LE_EXPR:
3320 case GT_EXPR:
3321 case GE_EXPR:
3322 case EQ_EXPR:
3323 case NE_EXPR:
3324 case UNORDERED_EXPR:
3325 case ORDERED_EXPR:
3326 case UNLT_EXPR:
3327 case UNLE_EXPR:
3328 case UNGT_EXPR:
3329 case UNGE_EXPR:
3330 case UNEQ_EXPR:
3331 case LTGT_EXPR:
3332 case PLUS_EXPR:
3333 case MINUS_EXPR:
3334 case MULT_EXPR:
3335 case TRUNC_DIV_EXPR:
3336 case CEIL_DIV_EXPR:
3337 case FLOOR_DIV_EXPR:
3338 case ROUND_DIV_EXPR:
3339 case TRUNC_MOD_EXPR:
3340 case CEIL_MOD_EXPR:
3341 case FLOOR_MOD_EXPR:
3342 case ROUND_MOD_EXPR:
3343 case RDIV_EXPR:
3344 case EXACT_DIV_EXPR:
3345 case MIN_EXPR:
3346 case MAX_EXPR:
3347 case LSHIFT_EXPR:
3348 case RSHIFT_EXPR:
3349 case LROTATE_EXPR:
3350 case RROTATE_EXPR:
3351 case BIT_IOR_EXPR:
3352 case BIT_XOR_EXPR:
3353 case BIT_AND_EXPR:
3354 CHECK_OP (0, "Invalid operand to binary operator");
3355 CHECK_OP (1, "Invalid operand to binary operator");
3356 break;
3358 default:
3359 break;
3361 return NULL;
3363 #undef CHECK_OP
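/* Illustrative note (added commentary): CHECK_OP rejects operands that
   are neither constants nor GIMPLE values, so e.g. a statement whose
   right-hand side is "a_1 + (b_2 * c_3)" is reported with
   "Invalid operand to binary operator", since the nested MULT_EXPR is not
   a gimple value.  */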
3367 /* Verify STMT, return true if STMT is not in GIMPLE form.
3368 TODO: Implement type checking. */
3370 static bool
3371 verify_stmt (tree stmt, bool last_in_block)
3373 tree addr;
3375 if (!is_gimple_stmt (stmt))
3377 error ("Is not a valid GIMPLE statement.");
3378 goto fail;
3381 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3382 if (addr)
3384 debug_generic_stmt (addr);
3385 return true;
3388 /* If the statement is marked as part of an EH region, then it is
3389 expected that the statement could throw. Verify that when we
3390 have optimizations that simplify statements such that we prove
3391 that they cannot throw, that we update other data structures
3392 to match. */
3393 if (lookup_stmt_eh_region (stmt) >= 0)
3395 if (!tree_could_throw_p (stmt))
3397 error ("Statement marked for throw, but doesn%'t.");
3398 goto fail;
3400 if (!last_in_block && tree_can_throw_internal (stmt))
3402 error ("Statement marked for throw in middle of block.");
3403 goto fail;
3407 return false;
3409 fail:
3410 debug_generic_stmt (stmt);
3411 return true;
3415 /* Return true when T can be shared. */
3417 static bool
3418 tree_node_can_be_shared (tree t)
3420 if (IS_TYPE_OR_DECL_P (t)
3421 /* We check for constants explicitly since they are not considered
3422 gimple invariants if they overflowed. */
3423 || CONSTANT_CLASS_P (t)
3424 || is_gimple_min_invariant (t)
3425 || TREE_CODE (t) == SSA_NAME
3426 || t == error_mark_node)
3427 return true;
3429 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3430 return true;
3432 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3433 /* We check for constants explicitly since they are not considered
3434 gimple invariants if they overflowed. */
3435 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3436 || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3437 || (TREE_CODE (t) == COMPONENT_REF
3438 || TREE_CODE (t) == REALPART_EXPR
3439 || TREE_CODE (t) == IMAGPART_EXPR))
3440 t = TREE_OPERAND (t, 0);
3442 if (DECL_P (t))
3443 return true;
3445 return false;
3449 /* Called via walk_tree. Verify tree sharing. */
3451 static tree
3452 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3454 htab_t htab = (htab_t) data;
3455 void **slot;
3457 if (tree_node_can_be_shared (*tp))
3459 *walk_subtrees = false;
3460 return NULL;
3463 slot = htab_find_slot (htab, *tp, INSERT);
3464 if (*slot)
3465 return *slot;
3466 *slot = *tp;
3468 return NULL;
3472 /* Verify the GIMPLE statement chain. */
3474 void
3475 verify_stmts (void)
3477 basic_block bb;
3478 block_stmt_iterator bsi;
3479 bool err = false;
3480 htab_t htab;
3481 tree addr;
3483 timevar_push (TV_TREE_STMT_VERIFY);
3484 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3486 FOR_EACH_BB (bb)
3488 tree phi;
3489 int i;
3491 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3493 int phi_num_args = PHI_NUM_ARGS (phi);
3495 for (i = 0; i < phi_num_args; i++)
3497 tree t = PHI_ARG_DEF (phi, i);
3498 tree addr;
3500 /* Addressable variables do have SSA_NAMEs but they
3501 are not considered gimple values. */
3502 if (TREE_CODE (t) != SSA_NAME
3503 && TREE_CODE (t) != FUNCTION_DECL
3504 && !is_gimple_val (t))
3506 error ("PHI def is not a GIMPLE value");
3507 debug_generic_stmt (phi);
3508 debug_generic_stmt (t);
3509 err |= true;
3512 addr = walk_tree (&t, verify_expr, NULL, NULL);
3513 if (addr)
3515 debug_generic_stmt (addr);
3516 err |= true;
3519 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3520 if (addr)
3522 error ("Incorrect sharing of tree nodes");
3523 debug_generic_stmt (phi);
3524 debug_generic_stmt (addr);
3525 err |= true;
3530 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3532 tree stmt = bsi_stmt (bsi);
3533 bsi_next (&bsi);
3534 err |= verify_stmt (stmt, bsi_end_p (bsi));
3535 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3536 if (addr)
3538 error ("Incorrect sharing of tree nodes");
3539 debug_generic_stmt (stmt);
3540 debug_generic_stmt (addr);
3541 err |= true;
3546 if (err)
3547 internal_error ("verify_stmts failed.");
3549 htab_delete (htab);
3550 timevar_pop (TV_TREE_STMT_VERIFY);
3554 /* Verifies that the flow information is OK. */
3556 static int
3557 tree_verify_flow_info (void)
3559 int err = 0;
3560 basic_block bb;
3561 block_stmt_iterator bsi;
3562 tree stmt;
3563 edge e;
3564 edge_iterator ei;
3566 if (ENTRY_BLOCK_PTR->stmt_list)
3568 error ("ENTRY_BLOCK has a statement list associated with it\n");
3569 err = 1;
3572 if (EXIT_BLOCK_PTR->stmt_list)
3574 error ("EXIT_BLOCK has a statement list associated with it\n");
3575 err = 1;
3578 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3579 if (e->flags & EDGE_FALLTHRU)
3581 error ("Fallthru to exit from bb %d\n", e->src->index);
3582 err = 1;
3585 FOR_EACH_BB (bb)
3587 bool found_ctrl_stmt = false;
3589 /* Skip labels on the start of basic block. */
3590 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3592 if (TREE_CODE (bsi_stmt (bsi)) != LABEL_EXPR)
3593 break;
3595 if (label_to_block (LABEL_EXPR_LABEL (bsi_stmt (bsi))) != bb)
3597 tree stmt = bsi_stmt (bsi);
3598 error ("Label %s to block does not match in bb %d\n",
3599 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3600 bb->index);
3601 err = 1;
3604 if (decl_function_context (LABEL_EXPR_LABEL (bsi_stmt (bsi)))
3605 != current_function_decl)
3607 tree stmt = bsi_stmt (bsi);
3608 error ("Label %s has incorrect context in bb %d\n",
3609 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3610 bb->index);
3611 err = 1;
3615 /* Verify that the body of basic block BB is free of control flow. */
3616 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3618 tree stmt = bsi_stmt (bsi);
3620 if (found_ctrl_stmt)
3622 error ("Control flow in the middle of basic block %d\n",
3623 bb->index);
3624 err = 1;
3627 if (stmt_ends_bb_p (stmt))
3628 found_ctrl_stmt = true;
3630 if (TREE_CODE (stmt) == LABEL_EXPR)
3632 error ("Label %s in the middle of basic block %d\n",
3633 IDENTIFIER_POINTER (DECL_NAME (stmt)),
3634 bb->index);
3635 err = 1;
3638 bsi = bsi_last (bb);
3639 if (bsi_end_p (bsi))
3640 continue;
3642 stmt = bsi_stmt (bsi);
3644 if (is_ctrl_stmt (stmt))
3646 FOR_EACH_EDGE (e, ei, bb->succs)
3647 if (e->flags & EDGE_FALLTHRU)
3649 error ("Fallthru edge after a control statement in bb %d \n",
3650 bb->index);
3651 err = 1;
3655 switch (TREE_CODE (stmt))
3657 case COND_EXPR:
3659 edge true_edge;
3660 edge false_edge;
3661 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3662 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3664 error ("Structured COND_EXPR at the end of bb %d\n", bb->index);
3665 err = 1;
3668 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3670 if (!true_edge || !false_edge
3671 || !(true_edge->flags & EDGE_TRUE_VALUE)
3672 || !(false_edge->flags & EDGE_FALSE_VALUE)
3673 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3674 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3675 || EDGE_COUNT (bb->succs) >= 3)
3677 error ("Wrong outgoing edge flags at end of bb %d\n",
3678 bb->index);
3679 err = 1;
3682 if (!has_label_p (true_edge->dest,
3683 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3685 error ("%<then%> label does not match edge at end of bb %d\n",
3686 bb->index);
3687 err = 1;
3690 if (!has_label_p (false_edge->dest,
3691 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3693 error ("%<else%> label does not match edge at end of bb %d\n",
3694 bb->index);
3695 err = 1;
3698 break;
3700 case GOTO_EXPR:
3701 if (simple_goto_p (stmt))
3703 error ("Explicit goto at end of bb %d\n", bb->index);
3704 err = 1;
3706 else
3708 /* FIXME. We should double check that the labels in the
3709 destination blocks have their address taken. */
3710 FOR_EACH_EDGE (e, ei, bb->succs)
3711 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3712 | EDGE_FALSE_VALUE))
3713 || !(e->flags & EDGE_ABNORMAL))
3715 error ("Wrong outgoing edge flags at end of bb %d\n",
3716 bb->index);
3717 err = 1;
3720 break;
3722 case RETURN_EXPR:
3723 if (EDGE_COUNT (bb->succs) != 1
3724 || (EDGE_SUCC (bb, 0)->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3725 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3727 error ("Wrong outgoing edge flags at end of bb %d\n", bb->index);
3728 err = 1;
3730 if (EDGE_SUCC (bb, 0)->dest != EXIT_BLOCK_PTR)
3732 error ("Return edge does not point to exit in bb %d\n",
3733 bb->index);
3734 err = 1;
3736 break;
3738 case SWITCH_EXPR:
3740 tree prev;
3741 edge e;
3742 size_t i, n;
3743 tree vec;
3745 vec = SWITCH_LABELS (stmt);
3746 n = TREE_VEC_LENGTH (vec);
3748 /* Mark all the destination basic blocks. */
3749 for (i = 0; i < n; ++i)
3751 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3752 basic_block label_bb = label_to_block (lab);
3754 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3755 label_bb->aux = (void *)1;
3758 /* Verify that the case labels are sorted. */
3759 prev = TREE_VEC_ELT (vec, 0);
3760 for (i = 1; i < n - 1; ++i)
3762 tree c = TREE_VEC_ELT (vec, i);
3763 if (! CASE_LOW (c))
3765 error ("Found default case not at end of case vector");
3766 err = 1;
3767 continue;
3769 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3771 error ("Case labels not sorted:\n ");
3772 print_generic_expr (stderr, prev, 0);
3773 fprintf (stderr," is greater than ");
3774 print_generic_expr (stderr, c, 0);
3775 fprintf (stderr," but comes before it.\n");
3776 err = 1;
3778 prev = c;
3780 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3782 error ("No default case found at end of case vector");
3783 err = 1;
3786 FOR_EACH_EDGE (e, ei, bb->succs)
3788 if (!e->dest->aux)
3790 error ("Extra outgoing edge %d->%d\n",
3791 bb->index, e->dest->index);
3792 err = 1;
3794 e->dest->aux = (void *)2;
3795 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3796 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3798 error ("Wrong outgoing edge flags at end of bb %d\n",
3799 bb->index);
3800 err = 1;
3804 /* Check that we have all of them. */
3805 for (i = 0; i < n; ++i)
3807 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3808 basic_block label_bb = label_to_block (lab);
3810 if (label_bb->aux != (void *)2)
3812 error ("Missing edge %i->%i",
3813 bb->index, label_bb->index);
3814 err = 1;
3818 FOR_EACH_EDGE (e, ei, bb->succs)
3819 e->dest->aux = (void *)0;
3822 default: ;
3826 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3827 verify_dominators (CDI_DOMINATORS);
3829 return err;
3833 /* Updates phi nodes after creating a forwarder block joined
3834 by edge FALLTHRU. */
3836 static void
3837 tree_make_forwarder_block (edge fallthru)
3839 edge e;
3840 edge_iterator ei;
3841 basic_block dummy, bb;
3842 tree phi, new_phi, var;
3844 dummy = fallthru->src;
3845 bb = fallthru->dest;
3847 if (EDGE_COUNT (bb->preds) == 1)
3848 return;
3850 /* If we redirected a branch we must create new phi nodes at the
3851 start of BB. */
3852 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3854 var = PHI_RESULT (phi);
3855 new_phi = create_phi_node (var, bb);
3856 SSA_NAME_DEF_STMT (var) = new_phi;
3857 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3858 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3861 /* Ensure that the PHI node chain is in the same order. */
3862 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3864 /* Add the arguments we have stored on edges. */
3865 FOR_EACH_EDGE (e, ei, bb->preds)
3867 if (e == fallthru)
3868 continue;
3870 flush_pending_stmts (e);
3875 /* Return true if basic block BB does nothing except pass control
3876 flow to another block and that we can safely insert a label at
3877 the start of the successor block.
3879 As a precondition, we require that BB not be equal to
3880 ENTRY_BLOCK_PTR. */
3882 static bool
3883 tree_forwarder_block_p (basic_block bb)
3885 block_stmt_iterator bsi;
3887 /* BB must have a single outgoing edge. */
3888 if (EDGE_COUNT (bb->succs) != 1
3889 /* BB can not have any PHI nodes. This could potentially be
3890 relaxed early in compilation if we re-rewrote the variables
3891 appearing in any PHI nodes in forwarder blocks. */
3892 || phi_nodes (bb)
3893 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
3894 || EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR
3895 /* Nor should this be an infinite loop. */
3896 || EDGE_SUCC (bb, 0)->dest == bb
3897 /* BB may not have an abnormal outgoing edge. */
3898 || (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
3899 return false;
3901 #if ENABLE_CHECKING
3902 gcc_assert (bb != ENTRY_BLOCK_PTR);
3903 #endif
3905 /* Now walk through the statements. We can ignore labels, anything else
3906 means this is not a forwarder block. */
3907 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3909 tree stmt = bsi_stmt (bsi);
3911 switch (TREE_CODE (stmt))
3913 case LABEL_EXPR:
3914 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3915 return false;
3916 break;
3918 default:
3919 return false;
3923 if (find_edge (ENTRY_BLOCK_PTR, bb))
3924 return false;
3926 return true;
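/* Sketch (added commentary): a block whose body is just a local label,
   e.g. "L1:;", with a single normal successor qualifies as a forwarder,
   while any block containing a real computation, a PHI node, or an
   abnormal outgoing edge does not.  */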
3929 /* Return true if BB has at least one abnormal incoming edge. */
3931 static inline bool
3932 has_abnormal_incoming_edge_p (basic_block bb)
3934 edge e;
3935 edge_iterator ei;
3937 FOR_EACH_EDGE (e, ei, bb->preds)
3938 if (e->flags & EDGE_ABNORMAL)
3939 return true;
3941 return false;
3944 /* Removes forwarder block BB. Returns false if this failed. If a new
3945 forwarder block is created due to redirection of edges, it is
3946 added to the worklist. */
3948 static bool
3949 remove_forwarder_block (basic_block bb, basic_block **worklist)
3951 edge succ = EDGE_SUCC (bb, 0), e, s;
3952 basic_block dest = succ->dest;
3953 tree label;
3954 tree phi;
3955 edge_iterator ei;
3956 block_stmt_iterator bsi, bsi_to;
3957 bool seen_abnormal_edge = false;
3959 /* We check for infinite loops already in tree_forwarder_block_p.
3960 However it may happen that the infinite loop is created
3961 afterwards due to removal of forwarders. */
3962 if (dest == bb)
3963 return false;
3965 /* If the destination block consists of a nonlocal label, do not merge
3966 it. */
3967 label = first_stmt (bb);
3968 if (label
3969 && TREE_CODE (label) == LABEL_EXPR
3970 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
3971 return false;
3973 /* If there is an abnormal edge to basic block BB, but not into
3974 DEST, problems might occur during removal of the phi nodes at
3975 out-of-SSA time due to overlapping live ranges of registers.
3977 If there is an abnormal edge in DEST, the problems would occur
3978 anyway since cleanup_dead_labels would then merge the labels for
3979 two different eh regions, and rest of exception handling code
3980 does not like it.
3982 So if there is an abnormal edge to BB, proceed only if there is
3983 no abnormal edge to DEST and there are no phi nodes in DEST. */
3984 if (has_abnormal_incoming_edge_p (bb))
3986 seen_abnormal_edge = true;
3988 if (has_abnormal_incoming_edge_p (dest)
3989 || phi_nodes (dest) != NULL_TREE)
3990 return false;
3993 /* If there are phi nodes in DEST, and some of the blocks that are
3994 predecessors of BB are also predecessors of DEST, check that the
3995 phi node arguments match. */
3996 if (phi_nodes (dest))
3998 FOR_EACH_EDGE (e, ei, bb->preds)
4000 s = find_edge (e->src, dest);
4001 if (!s)
4002 continue;
4004 if (!phi_alternatives_equal (dest, succ, s))
4005 return false;
4009 /* Redirect the edges. */
4010 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4012 if (e->flags & EDGE_ABNORMAL)
4014 /* If there is an abnormal edge, redirect it anyway, and
4015 move the labels to the new block to make it legal. */
4016 s = redirect_edge_succ_nodup (e, dest);
4018 else
4019 s = redirect_edge_and_branch (e, dest);
4021 if (s == e)
4023 /* Create arguments for the phi nodes, since the edge was not
4024 here before. */
4025 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4026 add_phi_arg (phi, PHI_ARG_DEF (phi, succ->dest_idx), s);
4028 else
4030 /* The source basic block might become a forwarder. We know
4031 that it was not a forwarder before, since it used to have
4032 at least two outgoing edges, so we may just add it to
4033 the worklist. */
4034 if (tree_forwarder_block_p (s->src))
4035 *(*worklist)++ = s->src;
4039 if (seen_abnormal_edge)
4041 /* Move the labels to the new block, so that the redirection of
4042 the abnormal edges works. */
4044 bsi_to = bsi_start (dest);
4045 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
4047 label = bsi_stmt (bsi);
4048 gcc_assert (TREE_CODE (label) == LABEL_EXPR);
4049 bsi_remove (&bsi);
4050 bsi_insert_before (&bsi_to, label, BSI_CONTINUE_LINKING);
4054 /* Update the dominators. */
4055 if (dom_info_available_p (CDI_DOMINATORS))
4057 basic_block dom, dombb, domdest;
4059 dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
4060 domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
4061 if (domdest == bb)
4063 /* Shortcut to avoid calling (relatively expensive)
4064 nearest_common_dominator unless necessary. */
4065 dom = dombb;
4067 else
4068 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
4070 set_immediate_dominator (CDI_DOMINATORS, dest, dom);
4073 /* And kill the forwarder block. */
4074 delete_basic_block (bb);
4076 return true;
4079 /* Removes forwarder blocks. */
4081 static bool
4082 cleanup_forwarder_blocks (void)
4084 basic_block bb;
4085 bool changed = false;
4086 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
4087 basic_block *current = worklist;
4089 FOR_EACH_BB (bb)
4091 if (tree_forwarder_block_p (bb))
4092 *current++ = bb;
4095 while (current != worklist)
4097 bb = *--current;
4098 changed |= remove_forwarder_block (bb, &current);
4101 free (worklist);
4102 return changed;
4105 /* Return a non-special label in the head of basic block BB.
4106 Create one if it doesn't exist. */
4108 tree
4109 tree_block_label (basic_block bb)
4111 block_stmt_iterator i, s = bsi_start (bb);
4112 bool first = true;
4113 tree label, stmt;
4115 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4117 stmt = bsi_stmt (i);
4118 if (TREE_CODE (stmt) != LABEL_EXPR)
4119 break;
4120 label = LABEL_EXPR_LABEL (stmt);
4121 if (!DECL_NONLOCAL (label))
4123 if (!first)
4124 bsi_move_before (&i, &s);
4125 return label;
4129 label = create_artificial_label ();
4130 stmt = build1 (LABEL_EXPR, void_type_node, label);
4131 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4132 return label;
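/* Usage sketch (illustrative): callers that need an explicit jump target,
   such as disband_implicit_edges above, typically do

     label = tree_block_label (e->dest);
     stmt = build1 (GOTO_EXPR, void_type_node, label);

   reusing a leading label of the destination block when one exists and
   creating an artificial one otherwise.  */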
4136 /* Attempt to perform edge redirection by replacing a possibly complex
4137 jump instruction by a goto or by removing the jump completely.
4138 This can apply only if all edges now point to the same block. The
4139 parameters and return values are equivalent to
4140 redirect_edge_and_branch. */
4142 static edge
4143 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4145 basic_block src = e->src;
4146 block_stmt_iterator b;
4147 tree stmt;
4149 /* We can replace or remove a complex jump only when we have exactly
4150 two edges. */
4151 if (EDGE_COUNT (src->succs) != 2
4152 /* Verify that all targets will be TARGET. Specifically, the
4153 edge that is not E must also go to TARGET. */
4154 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4155 return NULL;
4157 b = bsi_last (src);
4158 if (bsi_end_p (b))
4159 return NULL;
4160 stmt = bsi_stmt (b);
4162 if (TREE_CODE (stmt) == COND_EXPR
4163 || TREE_CODE (stmt) == SWITCH_EXPR)
4165 bsi_remove (&b);
4166 e = ssa_redirect_edge (e, target);
4167 e->flags = EDGE_FALLTHRU;
4168 return e;
4171 return NULL;
4175 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4176 edge representing the redirected branch. */
4178 static edge
4179 tree_redirect_edge_and_branch (edge e, basic_block dest)
4181 basic_block bb = e->src;
4182 block_stmt_iterator bsi;
4183 edge ret;
4184 tree label, stmt;
4186 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4187 return NULL;
4189 if (e->src != ENTRY_BLOCK_PTR
4190 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4191 return ret;
4193 if (e->dest == dest)
4194 return NULL;
4196 label = tree_block_label (dest);
4198 bsi = bsi_last (bb);
4199 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4201 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4203 case COND_EXPR:
4204 stmt = (e->flags & EDGE_TRUE_VALUE
4205 ? COND_EXPR_THEN (stmt)
4206 : COND_EXPR_ELSE (stmt));
4207 GOTO_DESTINATION (stmt) = label;
4208 break;
4210 case GOTO_EXPR:
4211 /* No non-abnormal edges should lead from a non-simple goto, and
4212 simple ones should be represented implicitly. */
4213 gcc_unreachable ();
4215 case SWITCH_EXPR:
4217 tree cases = get_cases_for_edge (e, stmt);
4219 /* If we have a list of cases associated with E, then use it
4220 as it's a lot faster than walking the entire case vector. */
4221 if (cases)
4223 edge e2 = find_edge (e->src, dest);
4224 tree last, first;
4226 first = cases;
4227 while (cases)
4229 last = cases;
4230 CASE_LABEL (cases) = label;
4231 cases = TREE_CHAIN (cases);
4234 /* If there was already an edge in the CFG, then we need
4235 to move all the cases associated with E to E2. */
4236 if (e2)
4238 tree cases2 = get_cases_for_edge (e2, stmt);
4240 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4241 TREE_CHAIN (cases2) = first;
4244 else
4246 tree vec = SWITCH_LABELS (stmt);
4247 size_t i, n = TREE_VEC_LENGTH (vec);
4249 for (i = 0; i < n; i++)
4251 tree elt = TREE_VEC_ELT (vec, i);
4253 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4254 CASE_LABEL (elt) = label;
4258 break;
4261 case RETURN_EXPR:
4262 bsi_remove (&bsi);
4263 e->flags |= EDGE_FALLTHRU;
4264 break;
4266 default:
4267 /* Otherwise it must be a fallthru edge, and we don't need to
4268 do anything besides redirecting it. */
4269 gcc_assert (e->flags & EDGE_FALLTHRU);
4270 break;
4273 /* Update/insert PHI nodes as necessary. */
4275 /* Now update the edges in the CFG. */
4276 e = ssa_redirect_edge (e, dest);
4278 return e;
4282 /* Simple wrapper, as we can always redirect fallthru edges. */
4284 static basic_block
4285 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4287 e = tree_redirect_edge_and_branch (e, dest);
4288 gcc_assert (e);
4290 return NULL;
4294 /* Splits basic block BB after statement STMT (but at least after the
4295 labels). If STMT is NULL, BB is split just after the labels. */
4297 static basic_block
4298 tree_split_block (basic_block bb, void *stmt)
4300 block_stmt_iterator bsi, bsi_tgt;
4301 tree act;
4302 basic_block new_bb;
4303 edge e;
4304 edge_iterator ei;
4306 new_bb = create_empty_bb (bb);
4308 /* Redirect the outgoing edges. */
4309 new_bb->succs = bb->succs;
4310 bb->succs = NULL;
4311 FOR_EACH_EDGE (e, ei, new_bb->succs)
4312 e->src = new_bb;
4314 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4315 stmt = NULL;
4317 /* Move everything from BSI to the new basic block. */
4318 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4320 act = bsi_stmt (bsi);
4321 if (TREE_CODE (act) == LABEL_EXPR)
4322 continue;
4324 if (!stmt)
4325 break;
4327 if (stmt == act)
4329 bsi_next (&bsi);
4330 break;
4334 bsi_tgt = bsi_start (new_bb);
4335 while (!bsi_end_p (bsi))
4337 act = bsi_stmt (bsi);
4338 bsi_remove (&bsi);
4339 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4342 return new_bb;
4346 /* Moves basic block BB after block AFTER. */
4348 static bool
4349 tree_move_block_after (basic_block bb, basic_block after)
4351 if (bb->prev_bb == after)
4352 return true;
4354 unlink_block (bb);
4355 link_block (bb, after);
4357 return true;
4361 /* Return true if basic_block can be duplicated. */
4363 static bool
4364 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4366 return true;
4369 /* Create a duplicate of the basic block BB. NOTE: This does not
4370 preserve SSA form. */
4372 static basic_block
4373 tree_duplicate_bb (basic_block bb)
4375 basic_block new_bb;
4376 block_stmt_iterator bsi, bsi_tgt;
4377 tree phi, val;
4378 ssa_op_iter op_iter;
4380 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4382 /* First copy the phi nodes. We do not copy phi node arguments here,
4383 since the edges are not ready yet. Keep the chain of phi nodes in
4384 the same order, so that we can add them later. */
4385 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4387 mark_for_rewrite (PHI_RESULT (phi));
4388 create_phi_node (PHI_RESULT (phi), new_bb);
4390 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4392 bsi_tgt = bsi_start (new_bb);
4393 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4395 tree stmt = bsi_stmt (bsi);
4396 tree copy;
4398 if (TREE_CODE (stmt) == LABEL_EXPR)
4399 continue;
4401 /* Record the definitions. */
4402 get_stmt_operands (stmt);
4404 FOR_EACH_SSA_TREE_OPERAND (val, stmt, op_iter, SSA_OP_ALL_DEFS)
4405 mark_for_rewrite (val);
4407 copy = unshare_expr (stmt);
4409 /* Copy also the virtual operands. */
4410 get_stmt_ann (copy);
4411 copy_virtual_operands (copy, stmt);
4413 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4416 return new_bb;
4419 /* Basic block BB_COPY was created by code duplication. Add phi node
4420 arguments for edges going out of BB_COPY. The blocks that were
4421 duplicated have rbi->duplicated set to one. */
4423 void
4424 add_phi_args_after_copy_bb (basic_block bb_copy)
4426 basic_block bb, dest;
4427 edge e, e_copy;
4428 edge_iterator ei;
4429 tree phi, phi_copy, phi_next, def;
4431 bb = bb_copy->rbi->original;
4433 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4435 if (!phi_nodes (e_copy->dest))
4436 continue;
4438 if (e_copy->dest->rbi->duplicated)
4439 dest = e_copy->dest->rbi->original;
4440 else
4441 dest = e_copy->dest;
4443 e = find_edge (bb, dest);
4444 if (!e)
4446 /* During loop unrolling the target of the latch edge is copied.
4447 In this case we are not looking for the edge to DEST, but for the
4448 edge to the duplicated block whose original was DEST. */
4449 FOR_EACH_EDGE (e, ei, bb->succs)
4450 if (e->dest->rbi->duplicated
4451 && e->dest->rbi->original == dest)
4452 break;
4454 gcc_assert (e != NULL);
4457 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4458 phi;
4459 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4461 phi_next = PHI_CHAIN (phi);
4463 gcc_assert (PHI_RESULT (phi) == PHI_RESULT (phi_copy));
4464 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4465 add_phi_arg (phi_copy, def, e_copy);
4470 /* Blocks in REGION_COPY array of length N_REGION were created by
4471 duplication of basic blocks. Add phi node arguments for edges
4472 going from these blocks. */
4474 void
4475 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4477 unsigned i;
4479 for (i = 0; i < n_region; i++)
4480 region_copy[i]->rbi->duplicated = 1;
4482 for (i = 0; i < n_region; i++)
4483 add_phi_args_after_copy_bb (region_copy[i]);
4485 for (i = 0; i < n_region; i++)
4486 region_copy[i]->rbi->duplicated = 0;
4489 /* Maps the old ssa name FROM_NAME to TO_NAME. */
4491 struct ssa_name_map_entry
4493 tree from_name;
4494 tree to_name;
4497 /* Hash function for ssa_name_map_entry. */
4499 static hashval_t
4500 ssa_name_map_entry_hash (const void *entry)
4502 const struct ssa_name_map_entry *en = entry;
4503 return SSA_NAME_VERSION (en->from_name);
4506 /* Equality function for ssa_name_map_entry. */
4508 static int
4509 ssa_name_map_entry_eq (const void *in_table, const void *ssa_name)
4511 const struct ssa_name_map_entry *en = in_table;
4513 return en->from_name == ssa_name;
4516 /* Allocate duplicates of the ssa names whose versions are set in the bitmap
4517 DEFINITIONS and store the mapping in *MAP. */
4519 void
4520 allocate_ssa_names (bitmap definitions, htab_t *map)
4522 tree name;
4523 struct ssa_name_map_entry *entry;
4524 PTR *slot;
4525 unsigned ver;
4526 bitmap_iterator bi;
4528 if (!*map)
4529 *map = htab_create (10, ssa_name_map_entry_hash,
4530 ssa_name_map_entry_eq, free);
4531 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
4533 name = ssa_name (ver);
4534 slot = htab_find_slot_with_hash (*map, name, SSA_NAME_VERSION (name),
4535 INSERT);
4536 if (*slot)
4537 entry = *slot;
4538 else
4540 entry = xmalloc (sizeof (struct ssa_name_map_entry));
4541 entry->from_name = name;
4542 *slot = entry;
4544 entry->to_name = duplicate_ssa_name (name, SSA_NAME_DEF_STMT (name));
4548 /* Rewrite the definition DEF in statement STMT to the new ssa name
4549 specified by the mapping MAP. */
4551 static void
4552 rewrite_to_new_ssa_names_def (def_operand_p def, tree stmt, htab_t map)
4554 tree name = DEF_FROM_PTR (def);
4555 struct ssa_name_map_entry *entry;
4557 gcc_assert (TREE_CODE (name) == SSA_NAME);
4559 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4560 if (!entry)
4561 return;
4563 SET_DEF (def, entry->to_name);
4564 SSA_NAME_DEF_STMT (entry->to_name) = stmt;
4567 /* Rewrite USE to the new ssa name specified by the mapping MAP. */
4569 static void
4570 rewrite_to_new_ssa_names_use (use_operand_p use, htab_t map)
4572 tree name = USE_FROM_PTR (use);
4573 struct ssa_name_map_entry *entry;
4575 if (TREE_CODE (name) != SSA_NAME)
4576 return;
4578 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4579 if (!entry)
4580 return;
4582 SET_USE (use, entry->to_name);
4585 /* Rewrite the ssa names in basic block BB to new ones as specified by the
4586 mapping MAP. */
4588 void
4589 rewrite_to_new_ssa_names_bb (basic_block bb, htab_t map)
4591 unsigned i;
4592 edge e;
4593 edge_iterator ei;
4594 tree phi, stmt;
4595 block_stmt_iterator bsi;
4596 use_optype uses;
4597 vuse_optype vuses;
4598 def_optype defs;
4599 v_may_def_optype v_may_defs;
4600 v_must_def_optype v_must_defs;
4601 stmt_ann_t ann;
4603 FOR_EACH_EDGE (e, ei, bb->preds)
4604 if (e->flags & EDGE_ABNORMAL)
4605 break;
4607 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4609 rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi), phi, map);
4610 if (e)
4611 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
4614 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4616 stmt = bsi_stmt (bsi);
4617 get_stmt_operands (stmt);
4618 ann = stmt_ann (stmt);
4620 uses = USE_OPS (ann);
4621 for (i = 0; i < NUM_USES (uses); i++)
4622 rewrite_to_new_ssa_names_use (USE_OP_PTR (uses, i), map);
4624 defs = DEF_OPS (ann);
4625 for (i = 0; i < NUM_DEFS (defs); i++)
4626 rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs, i), stmt, map);
4628 vuses = VUSE_OPS (ann);
4629 for (i = 0; i < NUM_VUSES (vuses); i++)
4630 rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses, i), map);
4632 v_may_defs = V_MAY_DEF_OPS (ann);
4633 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
4635 rewrite_to_new_ssa_names_use
4636 (V_MAY_DEF_OP_PTR (v_may_defs, i), map);
4637 rewrite_to_new_ssa_names_def
4638 (V_MAY_DEF_RESULT_PTR (v_may_defs, i), stmt, map);
4641 v_must_defs = V_MUST_DEF_OPS (ann);
4642 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
4644 rewrite_to_new_ssa_names_def
4645 (V_MUST_DEF_RESULT_PTR (v_must_defs, i), stmt, map);
4646 rewrite_to_new_ssa_names_use
4647 (V_MUST_DEF_KILL_PTR (v_must_defs, i), map);
4651 FOR_EACH_EDGE (e, ei, bb->succs)
4652 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
4654 rewrite_to_new_ssa_names_use
4655 (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), map);
4657 if (e->flags & EDGE_ABNORMAL)
4659 tree op = PHI_ARG_DEF_FROM_EDGE (phi, e);
4660 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op) = 1;
4665 /* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
4666 by the mapping MAP. */
4668 void
4669 rewrite_to_new_ssa_names (basic_block *region, unsigned n_region, htab_t map)
4671 unsigned r;
4673 for (r = 0; r < n_region; r++)
4674 rewrite_to_new_ssa_names_bb (region[r], map);
4677 /* Duplicates a REGION (a set of N_REGION basic blocks) with just a single
4678 important exit edge EXIT. By important we mean that no SSA name defined
4679 inside the region is live over the other exit edges of the region. All entry
4680 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4681 to the duplicate of the region. SSA form, dominance and loop information
4682 are updated. The new basic blocks are stored to REGION_COPY in the same
4683 order as they had in REGION, provided that REGION_COPY is not NULL.
4684 The function returns false if it is unable to copy the region, and
4685 true otherwise. */
4687 bool
4688 tree_duplicate_sese_region (edge entry, edge exit,
4689 basic_block *region, unsigned n_region,
4690 basic_block *region_copy)
4692 unsigned i, n_doms, ver;
4693 bool free_region_copy = false, copying_header = false;
4694 struct loop *loop = entry->dest->loop_father;
4695 edge exit_copy;
4696 bitmap definitions;
4697 tree phi;
4698 basic_block *doms;
4699 htab_t ssa_name_map = NULL;
4700 edge redirected;
4701 bitmap_iterator bi;
4703 if (!can_copy_bbs_p (region, n_region))
4704 return false;
4706 /* Some sanity checking. Note that we do not check for all possible
4707 misuses of the functions. That is, if you ask to copy something weird,
4708 it will work, but the state of the structures probably will not be
4709 correct. */
4711 for (i = 0; i < n_region; i++)
4713 /* We do not handle subloops, i.e. all the blocks must belong to the
4714 same loop. */
4715 if (region[i]->loop_father != loop)
4716 return false;
4718 if (region[i] != entry->dest
4719 && region[i] == loop->header)
4720 return false;
4723 loop->copy = loop;
4725 /* In case the function is used for loop header copying (which is the primary
4726 use), ensure that EXIT and its copy will be the new latch and entry edges. */
4727 if (loop->header == entry->dest)
4729 copying_header = true;
4730 loop->copy = loop->outer;
4732 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4733 return false;
4735 for (i = 0; i < n_region; i++)
4736 if (region[i] != exit->src
4737 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4738 return false;
4741 if (!region_copy)
4743 region_copy = xmalloc (sizeof (basic_block) * n_region);
4744 free_region_copy = true;
4747 gcc_assert (!any_marked_for_rewrite_p ());
4749 /* Record blocks outside the region that are dominated by something
4750 inside. */
4751 doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
4752 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4754 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
4755 definitions = marked_ssa_names ();
4757 if (copying_header)
4759 loop->header = exit->dest;
4760 loop->latch = exit->src;
4763 /* Redirect the entry and add the phi node arguments. */
4764 redirected = redirect_edge_and_branch (entry, entry->dest->rbi->copy);
4765 gcc_assert (redirected != NULL);
4766 flush_pending_stmts (entry);
4768 /* Concerning the updating of dominators: we must recompute the dominators
4769 of the entry block and its copy. Any block outside of the region that was
4770 dominated by something inside the region needs recomputing as well. */
4771 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4772 doms[n_doms++] = entry->dest->rbi->original;
4773 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4774 free (doms);
4776 /* Add the other phi node arguments. */
4777 add_phi_args_after_copy (region_copy, n_region);
4779 /* Add phi nodes for definitions at exit. TODO -- once we have immediate
4780 uses, it should be possible to emit phi nodes just for definitions that
4781 are used outside the region. */
4782 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
4784 tree name = ssa_name (ver);
4786 phi = create_phi_node (name, exit->dest);
4787 add_phi_arg (phi, name, exit);
4788 add_phi_arg (phi, name, exit_copy);
4790 SSA_NAME_DEF_STMT (name) = phi;
4793 /* And create new definitions inside the region and its copy. TODO -- once
4794 we have immediate uses, it might be better to leave the definitions in the
4795 region unchanged, create new ssa names for the phi nodes on the exit, and
4796 rewrite the uses, to avoid changing the copied region. */
4797 allocate_ssa_names (definitions, &ssa_name_map);
4798 rewrite_to_new_ssa_names (region, n_region, ssa_name_map);
4799 allocate_ssa_names (definitions, &ssa_name_map);
4800 rewrite_to_new_ssa_names (region_copy, n_region, ssa_name_map);
4801 htab_delete (ssa_name_map);
4803 if (free_region_copy)
4804 free (region_copy);
4806 unmark_all_for_rewrite ();
4807 BITMAP_XFREE (definitions);
4809 return true;
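/* Illustrative sketch (hypothetical, not part of the original file):
   the primary client of tree_duplicate_sese_region is loop header
   copying.  Assuming LOOP has a preheader and a single-block header,
   ENTRY is the preheader edge and EXIT is the header's successor edge
   that stays inside the loop; the copy then becomes the entry test and
   the original header becomes the new latch.  */
#if 0
static bool
example_copy_loop_header (struct loop *loop)
{
  basic_block header = loop->header;
  edge entry = loop_preheader_edge (loop);
  edge exit = EDGE_SUCC (header, 0);
  basic_block region[1];

  /* Pick the successor edge that does not leave the loop.  */
  if (!flow_bb_inside_loop_p (loop, exit->dest))
    exit = EDGE_SUCC (header, 1);

  region[0] = header;
  return tree_duplicate_sese_region (entry, exit, region, 1, NULL);
}
#endif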
4812 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h). */
4814 void
4815 dump_function_to_file (tree fn, FILE *file, int flags)
4817 tree arg, vars, var;
4818 bool ignore_topmost_bind = false, any_var = false;
4819 basic_block bb;
4820 tree chain;
4822 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
4824 arg = DECL_ARGUMENTS (fn);
4825 while (arg)
4827 print_generic_expr (file, arg, dump_flags);
4828 if (TREE_CHAIN (arg))
4829 fprintf (file, ", ");
4830 arg = TREE_CHAIN (arg);
4832 fprintf (file, ")\n");
4834 if (flags & TDF_RAW)
4836 dump_node (fn, TDF_SLIM | flags, file);
4837 return;
4840 /* When GIMPLE is lowered, the variables are no longer available in
4841 BIND_EXPRs, so display them separately. */
4842 if (cfun && cfun->unexpanded_var_list)
4844 ignore_topmost_bind = true;
4846 fprintf (file, "{\n");
4847 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
4849 var = TREE_VALUE (vars);
4851 print_generic_decl (file, var, flags);
4852 fprintf (file, "\n");
4854 any_var = true;
4858 if (basic_block_info)
4860 /* Make a CFG based dump. */
4861 check_bb_profile (ENTRY_BLOCK_PTR, file);
4862 if (!ignore_topmost_bind)
4863 fprintf (file, "{\n");
4865 if (any_var && n_basic_blocks)
4866 fprintf (file, "\n");
4868 FOR_EACH_BB (bb)
4869 dump_generic_bb (file, bb, 2, flags);
4871 fprintf (file, "}\n");
4872 check_bb_profile (EXIT_BLOCK_PTR, file);
4874 else
4876 int indent;
4878 /* Make a tree based dump. */
4879 chain = DECL_SAVED_TREE (fn);
4881 if (TREE_CODE (chain) == BIND_EXPR)
4883 if (ignore_topmost_bind)
4885 chain = BIND_EXPR_BODY (chain);
4886 indent = 2;
4888 else
4889 indent = 0;
4891 else
4893 if (!ignore_topmost_bind)
4894 fprintf (file, "{\n");
4895 indent = 2;
4898 if (any_var)
4899 fprintf (file, "\n");
4901 print_generic_stmt_indented (file, chain, flags, indent);
4902 if (ignore_topmost_bind)
4903 fprintf (file, "}\n");
4906 fprintf (file, "\n\n");
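/* Illustrative sketch (hypothetical): a pass or a debugging session can
   dump the function currently being compiled with the active dump flags.  */
#if 0
static void
example_dump_current_function (FILE *file)
{
  dump_function_to_file (cfun->decl, file, dump_flags);
}
#endif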
4910 /* Pretty printing of the loops' intermediate representation. */
4911 static void print_loop (FILE *, struct loop *, int);
4912 static void print_pred_bbs (FILE *, basic_block bb);
4913 static void print_succ_bbs (FILE *, basic_block bb);
4916 /* Print on FILE the indexes of the predecessor basic blocks of BB. */
4918 static void
4919 print_pred_bbs (FILE *file, basic_block bb)
4921 edge e;
4922 edge_iterator ei;
4924 FOR_EACH_EDGE (e, ei, bb->preds)
4925 fprintf (file, "bb_%d", e->src->index);
4929 /* Print on FILE the indexes of the successor basic blocks of BB. */
4931 static void
4932 print_succ_bbs (FILE *file, basic_block bb)
4934 edge e;
4935 edge_iterator ei;
4937 FOR_EACH_EDGE (e, ei, bb->succs)
4938 fprintf (file, "bb_%d", e->dest->index);
4942 /* Pretty print LOOP on FILE, indented INDENT spaces. */
4944 static void
4945 print_loop (FILE *file, struct loop *loop, int indent)
4947 char *s_indent;
4948 basic_block bb;
4950 if (loop == NULL)
4951 return;
4953 s_indent = (char *) alloca ((size_t) indent + 1);
4954 memset ((void *) s_indent, ' ', (size_t) indent);
4955 s_indent[indent] = '\0';
4957 /* Print the loop's header. */
4958 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
4960 /* Print the loop's body. */
4961 fprintf (file, "%s{\n", s_indent);
4962 FOR_EACH_BB (bb)
4963 if (bb->loop_father == loop)
4965 /* Print the basic_block's header. */
4966 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
4967 print_pred_bbs (file, bb);
4968 fprintf (file, "}, succs = {");
4969 print_succ_bbs (file, bb);
4970 fprintf (file, "})\n");
4972 /* Print the basic_block's body. */
4973 fprintf (file, "%s {\n", s_indent);
4974 tree_dump_bb (bb, file, indent + 4);
4975 fprintf (file, "%s }\n", s_indent);
4978 print_loop (file, loop->inner, indent + 2);
4979 fprintf (file, "%s}\n", s_indent);
4980 print_loop (file, loop->next, indent);
4984 /* Follow the CFG from the entry point of the current function and, on
4985 entering a loop, pretty print the loop structure on FILE. */
4987 void
4988 print_loop_ir (FILE *file)
4990 basic_block bb;
4992 bb = BASIC_BLOCK (0);
4993 if (bb && bb->loop_father)
4994 print_loop (file, bb->loop_father, 0);
4998 /* Debugging loops structure at tree level. */
5000 void
5001 debug_loop_ir (void)
5003 print_loop_ir (stderr);
5007 /* Return true if BB ends with a call, possibly followed by some
5008 instructions that must stay with the call. Return false
5009 otherwise. */
5011 static bool
5012 tree_block_ends_with_call_p (basic_block bb)
5014 block_stmt_iterator bsi = bsi_last (bb);
5015 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5019 /* Return true if BB ends with a conditional branch. Return false
5020 otherwise. */
5022 static bool
5023 tree_block_ends_with_condjump_p (basic_block bb)
5025 tree stmt = tsi_stmt (bsi_last (bb).tsi);
5026 return (TREE_CODE (stmt) == COND_EXPR);
5030 /* Return true if we need to add a fake edge to the exit block at
5031 statement T. Helper function for tree_flow_call_edges_add. */
5033 static bool
5034 need_fake_edge_p (tree t)
5036 tree call;
5038 /* NORETURN and LONGJMP calls already have an edge to exit.
5039 CONST, PURE and ALWAYS_RETURN calls do not need one.
5040 We don't currently check for CONST and PURE here, although
5041 it would be a good idea, because those attributes are
5042 figured out from the RTL in mark_constant_function, and
5043 the counter increment code emitted by -fprofile-arcs
5044 leads to different results from -fbranch-probabilities. */
5045 call = get_call_expr_in (t);
5046 if (call
5047 && !(call_expr_flags (call) & (ECF_NORETURN | ECF_ALWAYS_RETURN)))
5048 return true;
5050 if (TREE_CODE (t) == ASM_EXPR
5051 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5052 return true;
5054 return false;
5058 /* Add fake edges to the function exit for any non-constant and
5059 non-noreturn calls, and for volatile inline assembly, in the bitmap of
5060 blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.
5061 Return the number of blocks that were split.
5063 The goal is to expose cases in which entering a basic block does
5064 not imply that all subsequent instructions must be executed. */
5066 static int
5067 tree_flow_call_edges_add (sbitmap blocks)
5069 int i;
5070 int blocks_split = 0;
5071 int last_bb = last_basic_block;
5072 bool check_last_block = false;
5074 if (n_basic_blocks == 0)
5075 return 0;
5077 if (! blocks)
5078 check_last_block = true;
5079 else
5080 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5082 /* In the last basic block, before epilogue generation, there will be
5083 a fallthru edge to EXIT. Special care is required if the last statement
5084 of the last basic block is a call, because make_edge folds duplicate
5085 edges, which would result in the fallthru edge also being marked
5086 fake, which would result in the fallthru edge being removed by
5087 remove_fake_edges, which would result in an invalid CFG.
5089 Moreover, we can't elide the outgoing fake edge, since the block
5090 profiler needs to take this into account in order to solve the minimal
5091 spanning tree in the case that the call doesn't return.
5093 Handle this by adding a dummy instruction in a new last basic block. */
5094 if (check_last_block)
5096 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5097 block_stmt_iterator bsi = bsi_last (bb);
5098 tree t = NULL_TREE;
5099 if (!bsi_end_p (bsi))
5100 t = bsi_stmt (bsi);
5102 if (need_fake_edge_p (t))
5104 edge e;
5106 e = find_edge (bb, EXIT_BLOCK_PTR);
5107 if (e)
5109 bsi_insert_on_edge (e, build_empty_stmt ());
5110 bsi_commit_edge_inserts ();
5115 /* Now add fake edges to the function exit for any non-constant
5116 calls, since there is no way for us to determine whether they will
5117 return or not... */
5118 for (i = 0; i < last_bb; i++)
5120 basic_block bb = BASIC_BLOCK (i);
5121 block_stmt_iterator bsi;
5122 tree stmt, last_stmt;
5124 if (!bb)
5125 continue;
5127 if (blocks && !TEST_BIT (blocks, i))
5128 continue;
5130 bsi = bsi_last (bb);
5131 if (!bsi_end_p (bsi))
5133 last_stmt = bsi_stmt (bsi);
5136 stmt = bsi_stmt (bsi);
5137 if (need_fake_edge_p (stmt))
5139 edge e;
5140 /* The handling above of the final block before the
5141 epilogue should be enough to verify that there is
5142 no edge to the exit block in CFG already.
5143 Calling make_edge in such case would cause us to
5144 mark that edge as fake and remove it later. */
5145 #ifdef ENABLE_CHECKING
5146 if (stmt == last_stmt)
5148 e = find_edge (bb, EXIT_BLOCK_PTR);
5149 gcc_assert (e == NULL);
5151 #endif
5153 /* Note that the following may create a new basic block
5154 and renumber the existing basic blocks. */
5155 if (stmt != last_stmt)
5157 e = split_block (bb, stmt);
5158 if (e)
5159 blocks_split++;
5161 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5163 bsi_prev (&bsi);
5165 while (!bsi_end_p (bsi));
5169 if (blocks_split)
5170 verify_flow_info ();
5172 return blocks_split;
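/* Illustrative example (not from the original sources, names are
   hypothetical): given a block

     counter_a++;
     foo ();
     counter_b++;

   where foo may call exit, the fake edge added after the call records that
   control may leave the block there, so the -fprofile-arcs instrumentation
   does not conclude that counter_b++ executes whenever counter_a++ does.  */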
5175 bool
5176 tree_purge_dead_eh_edges (basic_block bb)
5178 bool changed = false;
5179 edge e;
5180 edge_iterator ei;
5181 tree stmt = last_stmt (bb);
5183 if (stmt && tree_can_throw_internal (stmt))
5184 return false;
5186 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5188 if (e->flags & EDGE_EH)
5190 remove_edge (e);
5191 changed = true;
5193 else
5194 ei_next (&ei);
5197 /* Removal of dead EH edges might change dominators of not
5198 just immediate successors. E.g. when bb1 is changed so that
5199 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5200 eh edges purged by this function in:
5201            0
5202           / \
5203          v   v
5204          1-->2
5205         / \  |
5206        v   v |
5207        3-->4 |
5208         \    v
5209          --->5
5212 idom(bb5) must be recomputed. For now just free the dominance
5213 info. */
5214 if (changed)
5215 free_dominance_info (CDI_DOMINATORS);
5217 return changed;
5220 bool
5221 tree_purge_all_dead_eh_edges (bitmap blocks)
5223 bool changed = false;
5224 unsigned i;
5225 bitmap_iterator bi;
5227 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5229 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5232 return changed;
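/* Illustrative sketch (hypothetical): a pass that has rewritten statements
   in the blocks recorded in CHANGED_BLOCKS can purge the EH edges that can
   no longer be taken and then clean up the CFG.  */
#if 0
static void
example_purge_eh_after_changes (bitmap changed_blocks)
{
  if (tree_purge_all_dead_eh_edges (changed_blocks))
    cleanup_tree_cfg ();
}
#endif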
5235 /* This function is called whenever a new edge is created or
5236 redirected. */
5238 static void
5239 tree_execute_on_growing_pred (edge e)
5241 basic_block bb = e->dest;
5243 if (phi_nodes (bb))
5244 reserve_phi_args_for_new_edge (bb);
5247 /* This function is called immediately before edge E is removed from
5248 the edge vector E->dest->preds. */
5250 static void
5251 tree_execute_on_shrinking_pred (edge e)
5253 if (phi_nodes (e->dest))
5254 remove_phi_args (e);
5257 struct cfg_hooks tree_cfg_hooks = {
5258 "tree",
5259 tree_verify_flow_info,
5260 tree_dump_bb, /* dump_bb */
5261 create_bb, /* create_basic_block */
5262 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5263 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5264 remove_bb, /* delete_basic_block */
5265 tree_split_block, /* split_block */
5266 tree_move_block_after, /* move_block_after */
5267 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5268 tree_merge_blocks, /* merge_blocks */
5269 tree_predict_edge, /* predict_edge */
5270 tree_predicted_by_p, /* predicted_by_p */
5271 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5272 tree_duplicate_bb, /* duplicate_block */
5273 tree_split_edge, /* split_edge */
5274 tree_make_forwarder_block, /* make_forward_block */
5275 NULL, /* tidy_fallthru_edge */
5276 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5277 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5278 tree_flow_call_edges_add, /* flow_call_edges_add */
5279 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5280 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5284 /* Split all critical edges. */
5286 static void
5287 split_critical_edges (void)
5289 basic_block bb;
5290 edge e;
5291 edge_iterator ei;
5293 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5294 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5295 mappings around the calls to split_edge. */
5296 start_recording_case_labels ();
5297 FOR_ALL_BB (bb)
5299 FOR_EACH_EDGE (e, ei, bb->succs)
5300 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5302 split_edge (e);
5305 end_recording_case_labels ();
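/* Illustrative example (not from the original sources): in

     if (p)
       goto join;
     x = f ();
   join:
     return x;

   the edge from the conditional block to "join" is critical: its source
   has two successors and its destination has two predecessors, so no code
   can be inserted on that edge alone until split_edge places a new empty
   block on it.  */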
5308 struct tree_opt_pass pass_split_crit_edges =
5310 "crited", /* name */
5311 NULL, /* gate */
5312 split_critical_edges, /* execute */
5313 NULL, /* sub */
5314 NULL, /* next */
5315 0, /* static_pass_number */
5316 TV_TREE_SPLIT_EDGES, /* tv_id */
5317 PROP_cfg, /* properties required */
5318 PROP_no_crit_edges, /* properties_provided */
5319 0, /* properties_destroyed */
5320 0, /* todo_flags_start */
5321 TODO_dump_func, /* todo_flags_finish */
5322 0 /* letter */
5326 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5327 a temporary, register the temporary to be renamed if necessary, and
5328 finally return the temporary. The statements computing EXP are
5329 inserted before the current statement in BSI. */
5331 tree
5332 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5334 tree t, new_stmt, orig_stmt;
5336 if (is_gimple_val (exp))
5337 return exp;
5339 t = make_rename_temp (type, NULL);
5340 new_stmt = build (MODIFY_EXPR, type, t, exp);
5342 orig_stmt = bsi_stmt (*bsi);
5343 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5344 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5346 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5348 return t;
5351 /* Build a ternary operation and gimplify it. Emit code before BSI.
5352 Return the gimple_val holding the result. */
5354 tree
5355 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5356 tree type, tree a, tree b, tree c)
5358 tree ret;
5360 ret = fold (build3 (code, type, a, b, c));
5361 STRIP_NOPS (ret);
5363 return gimplify_val (bsi, type, ret);
5366 /* Build a binary operation and gimplify it. Emit code before BSI.
5367 Return the gimple_val holding the result. */
5369 tree
5370 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5371 tree type, tree a, tree b)
5373 tree ret;
5375 ret = fold (build2 (code, type, a, b));
5376 STRIP_NOPS (ret);
5378 return gimplify_val (bsi, type, ret);
5381 /* Build a unary operation and gimplify it. Emit code before BSI.
5382 Return the gimple_val holding the result. */
5384 tree
5385 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5386 tree a)
5388 tree ret;
5390 ret = fold (build1 (code, type, a));
5391 STRIP_NOPS (ret);
5393 return gimplify_val (bsi, type, ret);
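/* Illustrative sketch (hypothetical): the gimplify_build* helpers compose;
   here (A + B) * C is built and emitted before the statement at *BSI,
   yielding a GIMPLE value for the result.  */
#if 0
static tree
example_build_madd (block_stmt_iterator *bsi, tree type, tree a, tree b,
		    tree c)
{
  tree sum = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);
  return gimplify_build2 (bsi, MULT_EXPR, type, sum, c);
}
#endif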
5398 /* Emit return warnings. */
5400 static void
5401 execute_warn_function_return (void)
5403 #ifdef USE_MAPPED_LOCATION
5404 source_location location;
5405 #else
5406 location_t *locus;
5407 #endif
5408 tree last;
5409 edge e;
5410 edge_iterator ei;
5412 if (warn_missing_noreturn
5413 && !TREE_THIS_VOLATILE (cfun->decl)
5414 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5415 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5416 warning ("%Jfunction might be possible candidate for "
5417 "attribute %<noreturn%>",
5418 cfun->decl);
5420 /* If we have a path to EXIT, then we do return. */
5421 if (TREE_THIS_VOLATILE (cfun->decl)
5422 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5424 #ifdef USE_MAPPED_LOCATION
5425 location = UNKNOWN_LOCATION;
5426 #else
5427 locus = NULL;
5428 #endif
5429 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5431 last = last_stmt (e->src);
5432 if (TREE_CODE (last) == RETURN_EXPR
5433 #ifdef USE_MAPPED_LOCATION
5434 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5435 #else
5436 && (locus = EXPR_LOCUS (last)) != NULL)
5437 #endif
5438 break;
5440 #ifdef USE_MAPPED_LOCATION
5441 if (location == UNKNOWN_LOCATION)
5442 location = cfun->function_end_locus;
5443 warning ("%H%<noreturn%> function does return", &location);
5444 #else
5445 if (!locus)
5446 locus = &cfun->function_end_locus;
5447 warning ("%H%<noreturn%> function does return", locus);
5448 #endif
5451 /* If we see "return;" in some basic block, then we do reach the end
5452 without returning a value. */
5453 else if (warn_return_type
5454 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5455 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5457 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5459 tree last = last_stmt (e->src);
5460 if (TREE_CODE (last) == RETURN_EXPR
5461 && TREE_OPERAND (last, 0) == NULL)
5463 #ifdef USE_MAPPED_LOCATION
5464 location = EXPR_LOCATION (last);
5465 if (location == UNKNOWN_LOCATION)
5466 location = cfun->function_end_locus;
5467 warning ("%Hcontrol reaches end of non-void function", &location);
5468 #else
5469 locus = EXPR_LOCUS (last);
5470 if (!locus)
5471 locus = &cfun->function_end_locus;
5472 warning ("%Hcontrol reaches end of non-void function", locus);
5473 #endif
5474 break;
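/* Illustrative example (not from the original sources): with
   -Wreturn-type, the "control reaches end of non-void function" warning
   above fires for

     int f (int x)
     {
       if (x)
         return 1;
     }

   because the path where X is zero falls through to the implicit
   "return;" without supplying a value.  */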
5481 /* Given a basic block B which ends with a conditional and has
5482 precisely two successors, determine which of the edges is taken if
5483 the conditional is true and which is taken if the conditional is
5484 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5486 void
5487 extract_true_false_edges_from_block (basic_block b,
5488 edge *true_edge,
5489 edge *false_edge)
5491 edge e = EDGE_SUCC (b, 0);
5493 if (e->flags & EDGE_TRUE_VALUE)
5495 *true_edge = e;
5496 *false_edge = EDGE_SUCC (b, 1);
5498 else
5500 *false_edge = e;
5501 *true_edge = EDGE_SUCC (b, 1);
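/* Illustrative sketch (hypothetical): a typical caller, after folding the
   COND_EXPR that ends BB to a constant, uses the extracted edges to find
   the path that will actually be taken.  */
#if 0
static edge
example_statically_taken_edge (basic_block bb)
{
  tree cond = COND_EXPR_COND (last_stmt (bb));
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  return integer_nonzerop (cond) ? true_edge : false_edge;
}
#endif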
5505 struct tree_opt_pass pass_warn_function_return =
5507 NULL, /* name */
5508 NULL, /* gate */
5509 execute_warn_function_return, /* execute */
5510 NULL, /* sub */
5511 NULL, /* next */
5512 0, /* static_pass_number */
5513 0, /* tv_id */
5514 PROP_cfg, /* properties_required */
5515 0, /* properties_provided */
5516 0, /* properties_destroyed */
5517 0, /* todo_flags_start */
5518 0, /* todo_flags_finish */
5519 0 /* letter */
5522 #include "gt-tree-cfg.h"