1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
25 #include "coretypes.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
46 #include "cfglayout.h"
47 #include "tree-ssa-propagate.h"
48 #include "value-prof.h"
49 #include "pointer-set.h"
51 /* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
54 /* Local declarations. */
56 /* Initial capacity for the basic block array. */
57 static const int initial_cfg_capacity
= 20;
59 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
62 hash table to prevent problems with duplication of SWITCH_EXPRs.
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
72 static struct pointer_map_t
*edge_to_cases
;
77 long num_merged_labels
;
80 static struct cfg_stats_d cfg_stats
;
82 /* Nonzero if we found a computed goto while building basic blocks. */
83 static bool found_computed_goto
;
85 /* Basic blocks and flowgraphs. */
86 static basic_block
create_bb (void *, void *, basic_block
);
87 static void make_blocks (tree
);
88 static void factor_computed_gotos (void);
91 static void make_edges (void);
92 static void make_cond_expr_edges (basic_block
);
93 static void make_switch_expr_edges (basic_block
);
94 static void make_goto_expr_edges (basic_block
);
95 static edge
tree_redirect_edge_and_branch (edge
, basic_block
);
96 static edge
tree_try_redirect_by_replacing_jump (edge
, basic_block
);
97 static unsigned int split_critical_edges (void);
99 /* Various helpers. */
100 static inline bool stmt_starts_bb_p (tree
, tree
);
101 static int tree_verify_flow_info (void);
102 static void tree_make_forwarder_block (edge
);
103 static void tree_cfg2vcg (FILE *);
104 static inline void change_bb_for_stmt (tree t
, basic_block bb
);
106 /* Flowgraph optimization and cleanup. */
107 static void tree_merge_blocks (basic_block
, basic_block
);
108 static bool tree_can_merge_blocks_p (basic_block
, basic_block
);
109 static void remove_bb (basic_block
);
110 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
111 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
112 static edge
find_taken_edge_switch_expr (basic_block
, tree
);
113 static tree
find_case_label_for_value (tree
, tree
);
116 init_empty_tree_cfg (void)
118 /* Initialize the basic block array. */
120 profile_status
= PROFILE_ABSENT
;
121 n_basic_blocks
= NUM_FIXED_BLOCKS
;
122 last_basic_block
= NUM_FIXED_BLOCKS
;
123 basic_block_info
= VEC_alloc (basic_block
, gc
, initial_cfg_capacity
);
124 VEC_safe_grow_cleared (basic_block
, gc
, basic_block_info
,
125 initial_cfg_capacity
);
127 /* Build a mapping of labels to their associated blocks. */
128 label_to_block_map
= VEC_alloc (basic_block
, gc
, initial_cfg_capacity
);
129 VEC_safe_grow_cleared (basic_block
, gc
, label_to_block_map
,
130 initial_cfg_capacity
);
132 SET_BASIC_BLOCK (ENTRY_BLOCK
, ENTRY_BLOCK_PTR
);
133 SET_BASIC_BLOCK (EXIT_BLOCK
, EXIT_BLOCK_PTR
);
134 ENTRY_BLOCK_PTR
->next_bb
= EXIT_BLOCK_PTR
;
135 EXIT_BLOCK_PTR
->prev_bb
= ENTRY_BLOCK_PTR
;
138 /*---------------------------------------------------------------------------
140 ---------------------------------------------------------------------------*/
142 /* Entry point to the CFG builder for trees. TP points to the list of
143 statements to be added to the flowgraph. */
146 build_tree_cfg (tree
*tp
)
148 /* Register specific tree functions. */
149 tree_register_cfg_hooks ();
151 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
153 init_empty_tree_cfg ();
155 found_computed_goto
= 0;
158 /* Computed gotos are hell to deal with, especially if there are
159 lots of them with a large number of destinations. So we factor
160 them to a common computed goto location before we build the
161 edge list. After we convert back to normal form, we will un-factor
162 the computed gotos since factoring introduces an unwanted jump. */
163 if (found_computed_goto
)
164 factor_computed_gotos ();
166 /* Make sure there is always at least one block, even if it's empty. */
167 if (n_basic_blocks
== NUM_FIXED_BLOCKS
)
168 create_empty_bb (ENTRY_BLOCK_PTR
);
170 /* Adjust the size of the array. */
171 if (VEC_length (basic_block
, basic_block_info
) < (size_t) n_basic_blocks
)
172 VEC_safe_grow_cleared (basic_block
, gc
, basic_block_info
, n_basic_blocks
);
174 /* To speed up statement iterator walks, we first purge dead labels. */
175 cleanup_dead_labels ();
177 /* Group case nodes to reduce the number of edges.
178 We do this after cleaning up dead labels because otherwise we miss
179 a lot of obvious case merging opportunities. */
180 group_case_labels ();
182 /* Create the edges of the flowgraph. */
185 /* Debugging dumps. */
187 /* Write the flowgraph to a VCG file. */
189 int local_dump_flags
;
190 FILE *vcg_file
= dump_begin (TDI_vcg
, &local_dump_flags
);
193 tree_cfg2vcg (vcg_file
);
194 dump_end (TDI_vcg
, vcg_file
);
198 #ifdef ENABLE_CHECKING
202 /* Dump a textual representation of the flowgraph. */
204 dump_tree_cfg (dump_file
, dump_flags
);
208 execute_build_cfg (void)
210 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl
));
214 struct tree_opt_pass pass_build_cfg
=
218 execute_build_cfg
, /* execute */
221 0, /* static_pass_number */
222 TV_TREE_CFG
, /* tv_id */
223 PROP_gimple_leh
, /* properties_required */
224 PROP_cfg
, /* properties_provided */
225 0, /* properties_destroyed */
226 0, /* todo_flags_start */
227 TODO_verify_stmts
| TODO_cleanup_cfg
, /* todo_flags_finish */
231 /* Search the CFG for any computed gotos. If found, factor them to a
232 common computed goto site. Also record the location of that site so
233 that we can un-factor the gotos after we have converted back to
237 factor_computed_gotos (void)
240 tree factored_label_decl
= NULL
;
242 tree factored_computed_goto_label
= NULL
;
243 tree factored_computed_goto
= NULL
;
245 /* We know there are one or more computed gotos in this function.
246 Examine the last statement in each basic block to see if the block
247 ends with a computed goto. */
251 block_stmt_iterator bsi
= bsi_last (bb
);
256 last
= bsi_stmt (bsi
);
258 /* Ignore the computed goto we create when we factor the original
260 if (last
== factored_computed_goto
)
263 /* If the last statement is a computed goto, factor it. */
264 if (computed_goto_p (last
))
268 /* The first time we find a computed goto we need to create
269 the factored goto block and the variable each original
270 computed goto will use for their goto destination. */
271 if (! factored_computed_goto
)
273 basic_block new_bb
= create_empty_bb (bb
);
274 block_stmt_iterator new_bsi
= bsi_start (new_bb
);
276 /* Create the destination of the factored goto. Each original
277 computed goto will put its desired destination into this
278 variable and jump to the label we create immediately
280 var
= create_tmp_var (ptr_type_node
, "gotovar");
282 /* Build a label for the new block which will contain the
283 factored computed goto. */
284 factored_label_decl
= create_artificial_label ();
285 factored_computed_goto_label
286 = build1 (LABEL_EXPR
, void_type_node
, factored_label_decl
);
287 bsi_insert_after (&new_bsi
, factored_computed_goto_label
,
290 /* Build our new computed goto. */
291 factored_computed_goto
= build1 (GOTO_EXPR
, void_type_node
, var
);
292 bsi_insert_after (&new_bsi
, factored_computed_goto
,
296 /* Copy the original computed goto's destination into VAR. */
297 assignment
= build2_gimple (GIMPLE_MODIFY_STMT
,
298 var
, GOTO_DESTINATION (last
));
299 bsi_insert_before (&bsi
, assignment
, BSI_SAME_STMT
);
301 /* And re-vector the computed goto to the new destination. */
302 GOTO_DESTINATION (last
) = factored_label_decl
;
308 /* Build a flowgraph for the statement_list STMT_LIST. */
311 make_blocks (tree stmt_list
)
313 tree_stmt_iterator i
= tsi_start (stmt_list
);
315 bool start_new_block
= true;
316 bool first_stmt_of_list
= true;
317 basic_block bb
= ENTRY_BLOCK_PTR
;
319 while (!tsi_end_p (i
))
326 /* If the statement starts a new basic block or if we have determined
327 in a previous pass that we need to create a new block for STMT, do
329 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
331 if (!first_stmt_of_list
)
332 stmt_list
= tsi_split_statement_list_before (&i
);
333 bb
= create_basic_block (stmt_list
, NULL
, bb
);
334 start_new_block
= false;
337 /* Now add STMT to BB and create the subgraphs for special statement
339 set_bb_for_stmt (stmt
, bb
);
341 if (computed_goto_p (stmt
))
342 found_computed_goto
= true;
344 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
346 if (stmt_ends_bb_p (stmt
))
347 start_new_block
= true;
350 first_stmt_of_list
= false;
355 /* Create and return a new empty basic block after bb AFTER. */
358 create_bb (void *h
, void *e
, basic_block after
)
364 /* Create and initialize a new basic block. Since alloc_block uses
365 ggc_alloc_cleared to allocate a basic block, we do not have to
366 clear the newly allocated basic block here. */
369 bb
->index
= last_basic_block
;
371 bb
->stmt_list
= h
? (tree
) h
: alloc_stmt_list ();
373 /* Add the new block to the linked list of blocks. */
374 link_block (bb
, after
);
376 /* Grow the basic block array if needed. */
377 if ((size_t) last_basic_block
== VEC_length (basic_block
, basic_block_info
))
379 size_t new_size
= last_basic_block
+ (last_basic_block
+ 3) / 4;
380 VEC_safe_grow_cleared (basic_block
, gc
, basic_block_info
, new_size
);
383 /* Add the newly created block to the array. */
384 SET_BASIC_BLOCK (last_basic_block
, bb
);
393 /*---------------------------------------------------------------------------
395 ---------------------------------------------------------------------------*/
397 /* Fold COND_EXPR_COND of each COND_EXPR. */
400 fold_cond_expr_cond (void)
406 tree stmt
= last_stmt (bb
);
409 && TREE_CODE (stmt
) == COND_EXPR
)
411 tree cond
= fold (COND_EXPR_COND (stmt
));
412 if (integer_zerop (cond
))
413 COND_EXPR_COND (stmt
) = boolean_false_node
;
414 else if (integer_onep (cond
))
415 COND_EXPR_COND (stmt
) = boolean_true_node
;
420 /* Join all the blocks in the flowgraph. */
426 struct omp_region
*cur_region
= NULL
;
428 /* Create an edge from entry to the first block with executable
430 make_edge (ENTRY_BLOCK_PTR
, BASIC_BLOCK (NUM_FIXED_BLOCKS
), EDGE_FALLTHRU
);
432 /* Traverse the basic block array placing edges. */
435 tree last
= last_stmt (bb
);
440 enum tree_code code
= TREE_CODE (last
);
444 make_goto_expr_edges (bb
);
448 make_edge (bb
, EXIT_BLOCK_PTR
, 0);
452 make_cond_expr_edges (bb
);
456 make_switch_expr_edges (bb
);
460 make_eh_edges (last
);
465 /* If this function receives a nonlocal goto, then we need to
466 make edges from this call site to all the nonlocal goto
468 if (tree_can_make_abnormal_goto (last
))
469 make_abnormal_goto_edges (bb
, true);
471 /* If this statement has reachable exception handlers, then
472 create abnormal edges to them. */
473 make_eh_edges (last
);
475 /* Some calls are known not to return. */
476 fallthru
= !(call_expr_flags (last
) & ECF_NORETURN
);
482 case GIMPLE_MODIFY_STMT
:
483 if (is_ctrl_altering_stmt (last
))
485 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
486 the CALL_EXPR may have an abnormal edge. Search the RHS
487 for this case and create any required edges. */
488 if (tree_can_make_abnormal_goto (last
))
489 make_abnormal_goto_edges (bb
, true);
491 make_eh_edges (last
);
503 cur_region
= new_omp_region (bb
, code
, cur_region
);
508 cur_region
= new_omp_region (bb
, code
, cur_region
);
513 /* In the case of an OMP_SECTION, the edge will go somewhere
514 other than the next block. This will be created later. */
515 cur_region
->exit
= bb
;
516 fallthru
= cur_region
->type
!= OMP_SECTION
;
517 cur_region
= cur_region
->outer
;
521 cur_region
->cont
= bb
;
522 switch (cur_region
->type
)
525 /* ??? Technically there should be a some sort of loopback
526 edge here, but it goes to a block that doesn't exist yet,
527 and without it, updating the ssa form would be a real
528 bear. Fortunately, we don't yet do ssa before expanding
533 /* Wire up the edges into and out of the nested sections. */
534 /* ??? Similarly wrt loopback. */
536 struct omp_region
*i
;
537 for (i
= cur_region
->inner
; i
; i
= i
->next
)
539 gcc_assert (i
->type
== OMP_SECTION
);
540 make_edge (cur_region
->entry
, i
->entry
, 0);
541 make_edge (i
->exit
, bb
, EDGE_FALLTHRU
);
553 gcc_assert (!stmt_ends_bb_p (last
));
561 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
567 /* Fold COND_EXPR_COND of each COND_EXPR. */
568 fold_cond_expr_cond ();
572 /* Create the edges for a COND_EXPR starting at block BB.
573 At this point, both clauses must contain only simple gotos. */
576 make_cond_expr_edges (basic_block bb
)
578 tree entry
= last_stmt (bb
);
579 basic_block then_bb
, else_bb
;
580 tree then_label
, else_label
;
584 gcc_assert (TREE_CODE (entry
) == COND_EXPR
);
586 /* Entry basic blocks for each component. */
587 then_label
= GOTO_DESTINATION (COND_EXPR_THEN (entry
));
588 else_label
= GOTO_DESTINATION (COND_EXPR_ELSE (entry
));
589 then_bb
= label_to_block (then_label
);
590 else_bb
= label_to_block (else_label
);
592 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
593 #ifdef USE_MAPPED_LOCATION
594 e
->goto_locus
= EXPR_LOCATION (COND_EXPR_THEN (entry
));
596 e
->goto_locus
= EXPR_LOCUS (COND_EXPR_THEN (entry
));
598 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
601 #ifdef USE_MAPPED_LOCATION
602 e
->goto_locus
= EXPR_LOCATION (COND_EXPR_ELSE (entry
));
604 e
->goto_locus
= EXPR_LOCUS (COND_EXPR_ELSE (entry
));
610 /* Called for each element in the hash table (P) as we delete the
611 edge to cases hash table.
613 Clear all the TREE_CHAINs to prevent problems with copying of
614 SWITCH_EXPRs and structure sharing rules, then free the hash table
618 edge_to_cases_cleanup (void *key ATTRIBUTE_UNUSED
, void **value
,
619 void *data ATTRIBUTE_UNUSED
)
623 for (t
= (tree
) *value
; t
; t
= next
)
625 next
= TREE_CHAIN (t
);
626 TREE_CHAIN (t
) = NULL
;
633 /* Start recording information mapping edges to case labels. */
636 start_recording_case_labels (void)
638 gcc_assert (edge_to_cases
== NULL
);
639 edge_to_cases
= pointer_map_create ();
642 /* Return nonzero if we are recording information for case labels. */
645 recording_case_labels_p (void)
647 return (edge_to_cases
!= NULL
);
650 /* Stop recording information mapping edges to case labels and
651 remove any information we have recorded. */
653 end_recording_case_labels (void)
655 pointer_map_traverse (edge_to_cases
, edge_to_cases_cleanup
, NULL
);
656 pointer_map_destroy (edge_to_cases
);
657 edge_to_cases
= NULL
;
660 /* If we are inside a {start,end}_recording_cases block, then return
661 a chain of CASE_LABEL_EXPRs from T which reference E.
663 Otherwise return NULL. */
666 get_cases_for_edge (edge e
, tree t
)
672 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
673 chains available. Return NULL so the caller can detect this case. */
674 if (!recording_case_labels_p ())
677 slot
= pointer_map_contains (edge_to_cases
, e
);
681 /* If we did not find E in the hash table, then this must be the first
682 time we have been queried for information about E & T. Add all the
683 elements from T to the hash table then perform the query again. */
685 vec
= SWITCH_LABELS (t
);
686 n
= TREE_VEC_LENGTH (vec
);
687 for (i
= 0; i
< n
; i
++)
689 tree elt
= TREE_VEC_ELT (vec
, i
);
690 tree lab
= CASE_LABEL (elt
);
691 basic_block label_bb
= label_to_block (lab
);
692 edge this_edge
= find_edge (e
->src
, label_bb
);
694 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
696 slot
= pointer_map_insert (edge_to_cases
, this_edge
);
697 TREE_CHAIN (elt
) = (tree
) *slot
;
701 return (tree
) *pointer_map_contains (edge_to_cases
, e
);
704 /* Create the edges for a SWITCH_EXPR starting at block BB.
705 At this point, the switch body has been lowered and the
706 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
709 make_switch_expr_edges (basic_block bb
)
711 tree entry
= last_stmt (bb
);
715 vec
= SWITCH_LABELS (entry
);
716 n
= TREE_VEC_LENGTH (vec
);
718 for (i
= 0; i
< n
; ++i
)
720 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
721 basic_block label_bb
= label_to_block (lab
);
722 make_edge (bb
, label_bb
, 0);
727 /* Return the basic block holding label DEST. */
730 label_to_block_fn (struct function
*ifun
, tree dest
)
732 int uid
= LABEL_DECL_UID (dest
);
734 /* We would die hard when faced by an undefined label. Emit a label to
735 the very first basic block. This will hopefully make even the dataflow
736 and undefined variable warnings quite right. */
737 if ((errorcount
|| sorrycount
) && uid
< 0)
739 block_stmt_iterator bsi
=
740 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS
));
743 stmt
= build1 (LABEL_EXPR
, void_type_node
, dest
);
744 bsi_insert_before (&bsi
, stmt
, BSI_NEW_STMT
);
745 uid
= LABEL_DECL_UID (dest
);
747 if (VEC_length (basic_block
, ifun
->cfg
->x_label_to_block_map
)
748 <= (unsigned int) uid
)
750 return VEC_index (basic_block
, ifun
->cfg
->x_label_to_block_map
, uid
);
753 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
754 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
757 make_abnormal_goto_edges (basic_block bb
, bool for_call
)
759 basic_block target_bb
;
760 block_stmt_iterator bsi
;
762 FOR_EACH_BB (target_bb
)
763 for (bsi
= bsi_start (target_bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
765 tree target
= bsi_stmt (bsi
);
767 if (TREE_CODE (target
) != LABEL_EXPR
)
770 target
= LABEL_EXPR_LABEL (target
);
772 /* Make an edge to every label block that has been marked as a
773 potential target for a computed goto or a non-local goto. */
774 if ((FORCED_LABEL (target
) && !for_call
)
775 || (DECL_NONLOCAL (target
) && for_call
))
777 make_edge (bb
, target_bb
, EDGE_ABNORMAL
);
783 /* Create edges for a goto statement at block BB. */
786 make_goto_expr_edges (basic_block bb
)
788 block_stmt_iterator last
= bsi_last (bb
);
789 tree goto_t
= bsi_stmt (last
);
791 /* A simple GOTO creates normal edges. */
792 if (simple_goto_p (goto_t
))
794 tree dest
= GOTO_DESTINATION (goto_t
);
795 edge e
= make_edge (bb
, label_to_block (dest
), EDGE_FALLTHRU
);
796 #ifdef USE_MAPPED_LOCATION
797 e
->goto_locus
= EXPR_LOCATION (goto_t
);
799 e
->goto_locus
= EXPR_LOCUS (goto_t
);
801 bsi_remove (&last
, true);
805 /* A computed GOTO creates abnormal edges. */
806 make_abnormal_goto_edges (bb
, false);
810 /*---------------------------------------------------------------------------
812 ---------------------------------------------------------------------------*/
814 /* Cleanup useless labels in basic blocks. This is something we wish
815 to do early because it allows us to group case labels before creating
816 the edges for the CFG, and it speeds up block statement iterators in
818 We only run this pass once, running it more than once is probably not
821 /* A map from basic block index to the leading label of that block. */
822 static tree
*label_for_bb
;
824 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
826 update_eh_label (struct eh_region
*region
)
828 tree old_label
= get_eh_region_tree_label (region
);
832 basic_block bb
= label_to_block (old_label
);
834 /* ??? After optimizing, there may be EH regions with labels
835 that have already been removed from the function body, so
836 there is no basic block for them. */
840 new_label
= label_for_bb
[bb
->index
];
841 set_eh_region_tree_label (region
, new_label
);
845 /* Given LABEL return the first label in the same basic block. */
847 main_block_label (tree label
)
849 basic_block bb
= label_to_block (label
);
851 /* label_to_block possibly inserted undefined label into the chain. */
852 if (!label_for_bb
[bb
->index
])
853 label_for_bb
[bb
->index
] = label
;
854 return label_for_bb
[bb
->index
];
857 /* Cleanup redundant labels. This is a three-step process:
858 1) Find the leading label for each block.
859 2) Redirect all references to labels to the leading labels.
860 3) Cleanup all useless labels. */
863 cleanup_dead_labels (void)
866 label_for_bb
= XCNEWVEC (tree
, last_basic_block
);
868 /* Find a suitable label for each block. We use the first user-defined
869 label if there is one, or otherwise just the first label we see. */
872 block_stmt_iterator i
;
874 for (i
= bsi_start (bb
); !bsi_end_p (i
); bsi_next (&i
))
876 tree label
, stmt
= bsi_stmt (i
);
878 if (TREE_CODE (stmt
) != LABEL_EXPR
)
881 label
= LABEL_EXPR_LABEL (stmt
);
883 /* If we have not yet seen a label for the current block,
884 remember this one and see if there are more labels. */
885 if (! label_for_bb
[bb
->index
])
887 label_for_bb
[bb
->index
] = label
;
891 /* If we did see a label for the current block already, but it
892 is an artificially created label, replace it if the current
893 label is a user defined label. */
894 if (! DECL_ARTIFICIAL (label
)
895 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
]))
897 label_for_bb
[bb
->index
] = label
;
903 /* Now redirect all jumps/branches to the selected label.
904 First do so for each block ending in a control statement. */
907 tree stmt
= last_stmt (bb
);
911 switch (TREE_CODE (stmt
))
915 tree true_branch
, false_branch
;
917 true_branch
= COND_EXPR_THEN (stmt
);
918 false_branch
= COND_EXPR_ELSE (stmt
);
920 GOTO_DESTINATION (true_branch
)
921 = main_block_label (GOTO_DESTINATION (true_branch
));
922 GOTO_DESTINATION (false_branch
)
923 = main_block_label (GOTO_DESTINATION (false_branch
));
931 tree vec
= SWITCH_LABELS (stmt
);
932 size_t n
= TREE_VEC_LENGTH (vec
);
934 /* Replace all destination labels. */
935 for (i
= 0; i
< n
; ++i
)
937 tree elt
= TREE_VEC_ELT (vec
, i
);
938 tree label
= main_block_label (CASE_LABEL (elt
));
939 CASE_LABEL (elt
) = label
;
944 /* We have to handle GOTO_EXPRs until they're removed, and we don't
945 remove them until after we've created the CFG edges. */
947 if (! computed_goto_p (stmt
))
949 GOTO_DESTINATION (stmt
)
950 = main_block_label (GOTO_DESTINATION (stmt
));
959 for_each_eh_region (update_eh_label
);
961 /* Finally, purge dead labels. All user-defined labels and labels that
962 can be the target of non-local gotos and labels which have their
963 address taken are preserved. */
966 block_stmt_iterator i
;
967 tree label_for_this_bb
= label_for_bb
[bb
->index
];
969 if (! label_for_this_bb
)
972 for (i
= bsi_start (bb
); !bsi_end_p (i
); )
974 tree label
, stmt
= bsi_stmt (i
);
976 if (TREE_CODE (stmt
) != LABEL_EXPR
)
979 label
= LABEL_EXPR_LABEL (stmt
);
981 if (label
== label_for_this_bb
982 || ! DECL_ARTIFICIAL (label
)
983 || DECL_NONLOCAL (label
)
984 || FORCED_LABEL (label
))
987 bsi_remove (&i
, true);
994 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
995 and scan the sorted vector of cases. Combine the ones jumping to the
997 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1000 group_case_labels (void)
1006 tree stmt
= last_stmt (bb
);
1007 if (stmt
&& TREE_CODE (stmt
) == SWITCH_EXPR
)
1009 tree labels
= SWITCH_LABELS (stmt
);
1010 int old_size
= TREE_VEC_LENGTH (labels
);
1011 int i
, j
, new_size
= old_size
;
1012 tree default_case
= TREE_VEC_ELT (labels
, old_size
- 1);
1015 /* The default label is always the last case in a switch
1016 statement after gimplification. */
1017 default_label
= CASE_LABEL (default_case
);
1019 /* Look for possible opportunities to merge cases.
1020 Ignore the last element of the label vector because it
1021 must be the default case. */
1023 while (i
< old_size
- 1)
1025 tree base_case
, base_label
, base_high
;
1026 base_case
= TREE_VEC_ELT (labels
, i
);
1028 gcc_assert (base_case
);
1029 base_label
= CASE_LABEL (base_case
);
1031 /* Discard cases that have the same destination as the
1033 if (base_label
== default_label
)
1035 TREE_VEC_ELT (labels
, i
) = NULL_TREE
;
1041 base_high
= CASE_HIGH (base_case
) ?
1042 CASE_HIGH (base_case
) : CASE_LOW (base_case
);
1044 /* Try to merge case labels. Break out when we reach the end
1045 of the label vector or when we cannot merge the next case
1046 label with the current one. */
1047 while (i
< old_size
- 1)
1049 tree merge_case
= TREE_VEC_ELT (labels
, i
);
1050 tree merge_label
= CASE_LABEL (merge_case
);
1051 tree t
= int_const_binop (PLUS_EXPR
, base_high
,
1052 integer_one_node
, 1);
1054 /* Merge the cases if they jump to the same place,
1055 and their ranges are consecutive. */
1056 if (merge_label
== base_label
1057 && tree_int_cst_equal (CASE_LOW (merge_case
), t
))
1059 base_high
= CASE_HIGH (merge_case
) ?
1060 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1061 CASE_HIGH (base_case
) = base_high
;
1062 TREE_VEC_ELT (labels
, i
) = NULL_TREE
;
1071 /* Compress the case labels in the label vector, and adjust the
1072 length of the vector. */
1073 for (i
= 0, j
= 0; i
< new_size
; i
++)
1075 while (! TREE_VEC_ELT (labels
, j
))
1077 TREE_VEC_ELT (labels
, i
) = TREE_VEC_ELT (labels
, j
++);
1079 TREE_VEC_LENGTH (labels
) = new_size
;
1084 /* Checks whether we can merge block B into block A. */
1087 tree_can_merge_blocks_p (basic_block a
, basic_block b
)
1090 block_stmt_iterator bsi
;
1093 if (!single_succ_p (a
))
1096 if (single_succ_edge (a
)->flags
& EDGE_ABNORMAL
)
1099 if (single_succ (a
) != b
)
1102 if (!single_pred_p (b
))
1105 if (b
== EXIT_BLOCK_PTR
)
1108 /* If A ends by a statement causing exceptions or something similar, we
1109 cannot merge the blocks. */
1110 stmt
= last_stmt (a
);
1111 if (stmt
&& stmt_ends_bb_p (stmt
))
1114 /* Do not allow a block with only a non-local label to be merged. */
1115 if (stmt
&& TREE_CODE (stmt
) == LABEL_EXPR
1116 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
)))
1119 /* It must be possible to eliminate all phi nodes in B. If ssa form
1120 is not up-to-date, we cannot eliminate any phis; however, if only
1121 some symbols as whole are marked for renaming, this is not a problem,
1122 as phi nodes for those symbols are irrelevant in updating anyway. */
1123 phi
= phi_nodes (b
);
1126 if (name_mappings_registered_p ())
1129 for (; phi
; phi
= PHI_CHAIN (phi
))
1130 if (!is_gimple_reg (PHI_RESULT (phi
))
1131 && !may_propagate_copy (PHI_RESULT (phi
), PHI_ARG_DEF (phi
, 0)))
1135 /* Do not remove user labels. */
1136 for (bsi
= bsi_start (b
); !bsi_end_p (bsi
); bsi_next (&bsi
))
1138 stmt
= bsi_stmt (bsi
);
1139 if (TREE_CODE (stmt
) != LABEL_EXPR
)
1141 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt
)))
1145 /* Protect the loop latches. */
1147 && b
->loop_father
->latch
== b
)
1153 /* Replaces all uses of NAME by VAL. */
1156 replace_uses_by (tree name
, tree val
)
1158 imm_use_iterator imm_iter
;
1163 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1165 if (TREE_CODE (stmt
) != PHI_NODE
)
1166 push_stmt_changes (&stmt
);
1168 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1170 replace_exp (use
, val
);
1172 if (TREE_CODE (stmt
) == PHI_NODE
)
1174 e
= PHI_ARG_EDGE (stmt
, PHI_ARG_INDEX_FROM_USE (use
));
1175 if (e
->flags
& EDGE_ABNORMAL
)
1177 /* This can only occur for virtual operands, since
1178 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1179 would prevent replacement. */
1180 gcc_assert (!is_gimple_reg (name
));
1181 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1186 if (TREE_CODE (stmt
) != PHI_NODE
)
1190 fold_stmt_inplace (stmt
);
1192 /* FIXME. This should go in pop_stmt_changes. */
1193 rhs
= get_rhs (stmt
);
1194 if (TREE_CODE (rhs
) == ADDR_EXPR
)
1195 recompute_tree_invariant_for_addr_expr (rhs
);
1197 maybe_clean_or_replace_eh_stmt (stmt
, stmt
);
1199 pop_stmt_changes (&stmt
);
1203 gcc_assert (zero_imm_uses_p (name
));
1205 /* Also update the trees stored in loop structures. */
1211 FOR_EACH_LOOP (li
, loop
, 0)
1213 substitute_in_loop_info (loop
, name
, val
);
1218 /* Merge block B into block A. */
1221 tree_merge_blocks (basic_block a
, basic_block b
)
1223 block_stmt_iterator bsi
;
1224 tree_stmt_iterator last
;
1228 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1230 /* Remove all single-valued PHI nodes from block B of the form
1231 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1233 for (phi
= phi_nodes (b
); phi
; phi
= phi_nodes (b
))
1235 tree def
= PHI_RESULT (phi
), use
= PHI_ARG_DEF (phi
, 0);
1237 bool may_replace_uses
= may_propagate_copy (def
, use
);
1239 /* In case we have loops to care about, do not propagate arguments of
1240 loop closed ssa phi nodes. */
1242 && is_gimple_reg (def
)
1243 && TREE_CODE (use
) == SSA_NAME
1244 && a
->loop_father
!= b
->loop_father
)
1245 may_replace_uses
= false;
1247 if (!may_replace_uses
)
1249 gcc_assert (is_gimple_reg (def
));
1251 /* Note that just emitting the copies is fine -- there is no problem
1252 with ordering of phi nodes. This is because A is the single
1253 predecessor of B, therefore results of the phi nodes cannot
1254 appear as arguments of the phi nodes. */
1255 copy
= build2_gimple (GIMPLE_MODIFY_STMT
, def
, use
);
1256 bsi_insert_after (&bsi
, copy
, BSI_NEW_STMT
);
1257 SSA_NAME_DEF_STMT (def
) = copy
;
1258 remove_phi_node (phi
, NULL
, false);
1262 replace_uses_by (def
, use
);
1263 remove_phi_node (phi
, NULL
, true);
1267 /* Ensure that B follows A. */
1268 move_block_after (b
, a
);
1270 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1271 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1273 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1274 for (bsi
= bsi_start (b
); !bsi_end_p (bsi
);)
1276 if (TREE_CODE (bsi_stmt (bsi
)) == LABEL_EXPR
)
1278 tree label
= bsi_stmt (bsi
);
1280 bsi_remove (&bsi
, false);
1281 /* Now that we can thread computed gotos, we might have
1282 a situation where we have a forced label in block B
1283 However, the label at the start of block B might still be
1284 used in other ways (think about the runtime checking for
1285 Fortran assigned gotos). So we can not just delete the
1286 label. Instead we move the label to the start of block A. */
1287 if (FORCED_LABEL (LABEL_EXPR_LABEL (label
)))
1289 block_stmt_iterator dest_bsi
= bsi_start (a
);
1290 bsi_insert_before (&dest_bsi
, label
, BSI_NEW_STMT
);
1295 change_bb_for_stmt (bsi_stmt (bsi
), a
);
1300 /* Merge the chains. */
1301 last
= tsi_last (a
->stmt_list
);
1302 tsi_link_after (&last
, b
->stmt_list
, TSI_NEW_STMT
);
1303 b
->stmt_list
= NULL
;
1307 /* Return the one of two successors of BB that is not reachable by a
1308 reached by a complex edge, if there is one. Else, return BB. We use
1309 this in optimizations that use post-dominators for their heuristics,
1310 to catch the cases in C++ where function calls are involved. */
1313 single_noncomplex_succ (basic_block bb
)
1316 if (EDGE_COUNT (bb
->succs
) != 2)
1319 e0
= EDGE_SUCC (bb
, 0);
1320 e1
= EDGE_SUCC (bb
, 1);
1321 if (e0
->flags
& EDGE_COMPLEX
)
1323 if (e1
->flags
& EDGE_COMPLEX
)
1330 /* Walk the function tree removing unnecessary statements.
1332 * Empty statement nodes are removed
1334 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1336 * Unnecessary COND_EXPRs are removed
1338 * Some unnecessary BIND_EXPRs are removed
1340 Clearly more work could be done. The trick is doing the analysis
1341 and removal fast enough to be a net improvement in compile times.
1343 Note that when we remove a control structure such as a COND_EXPR
1344 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1345 to ensure we eliminate all the useless code. */
1356 static void remove_useless_stmts_1 (tree
*, struct rus_data
*);
1359 remove_useless_stmts_warn_notreached (tree stmt
)
1361 if (EXPR_HAS_LOCATION (stmt
))
1363 location_t loc
= EXPR_LOCATION (stmt
);
1364 if (LOCATION_LINE (loc
) > 0)
1366 warning (0, "%Hwill never be executed", &loc
);
1371 switch (TREE_CODE (stmt
))
1373 case STATEMENT_LIST
:
1375 tree_stmt_iterator i
;
1376 for (i
= tsi_start (stmt
); !tsi_end_p (i
); tsi_next (&i
))
1377 if (remove_useless_stmts_warn_notreached (tsi_stmt (i
)))
1383 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt
)))
1385 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt
)))
1387 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt
)))
1391 case TRY_FINALLY_EXPR
:
1392 case TRY_CATCH_EXPR
:
1393 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt
, 0)))
1395 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt
, 1)))
1400 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt
));
1401 case EH_FILTER_EXPR
:
1402 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt
));
1404 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt
));
1407 /* Not a live container. */
1415 remove_useless_stmts_cond (tree
*stmt_p
, struct rus_data
*data
)
1417 tree then_clause
, else_clause
, cond
;
1418 bool save_has_label
, then_has_label
, else_has_label
;
1420 save_has_label
= data
->has_label
;
1421 data
->has_label
= false;
1422 data
->last_goto
= NULL
;
1424 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p
), data
);
1426 then_has_label
= data
->has_label
;
1427 data
->has_label
= false;
1428 data
->last_goto
= NULL
;
1430 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p
), data
);
1432 else_has_label
= data
->has_label
;
1433 data
->has_label
= save_has_label
| then_has_label
| else_has_label
;
1435 then_clause
= COND_EXPR_THEN (*stmt_p
);
1436 else_clause
= COND_EXPR_ELSE (*stmt_p
);
1437 cond
= fold (COND_EXPR_COND (*stmt_p
));
1439 /* If neither arm does anything at all, we can remove the whole IF. */
1440 if (!TREE_SIDE_EFFECTS (then_clause
) && !TREE_SIDE_EFFECTS (else_clause
))
1442 *stmt_p
= build_empty_stmt ();
1443 data
->repeat
= true;
1446 /* If there are no reachable statements in an arm, then we can
1447 zap the entire conditional. */
1448 else if (integer_nonzerop (cond
) && !else_has_label
)
1450 if (warn_notreached
)
1451 remove_useless_stmts_warn_notreached (else_clause
);
1452 *stmt_p
= then_clause
;
1453 data
->repeat
= true;
1455 else if (integer_zerop (cond
) && !then_has_label
)
1457 if (warn_notreached
)
1458 remove_useless_stmts_warn_notreached (then_clause
);
1459 *stmt_p
= else_clause
;
1460 data
->repeat
= true;
1463 /* Check a couple of simple things on then/else with single stmts. */
1466 tree then_stmt
= expr_only (then_clause
);
1467 tree else_stmt
= expr_only (else_clause
);
1469 /* Notice branches to a common destination. */
1470 if (then_stmt
&& else_stmt
1471 && TREE_CODE (then_stmt
) == GOTO_EXPR
1472 && TREE_CODE (else_stmt
) == GOTO_EXPR
1473 && (GOTO_DESTINATION (then_stmt
) == GOTO_DESTINATION (else_stmt
)))
1475 *stmt_p
= then_stmt
;
1476 data
->repeat
= true;
1479 /* If the THEN/ELSE clause merely assigns a value to a variable or
1480 parameter which is already known to contain that value, then
1481 remove the useless THEN/ELSE clause. */
1482 else if (TREE_CODE (cond
) == VAR_DECL
|| TREE_CODE (cond
) == PARM_DECL
)
1485 && TREE_CODE (else_stmt
) == GIMPLE_MODIFY_STMT
1486 && GIMPLE_STMT_OPERAND (else_stmt
, 0) == cond
1487 && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt
, 1)))
1488 COND_EXPR_ELSE (*stmt_p
) = alloc_stmt_list ();
1490 else if ((TREE_CODE (cond
) == EQ_EXPR
|| TREE_CODE (cond
) == NE_EXPR
)
1491 && (TREE_CODE (TREE_OPERAND (cond
, 0)) == VAR_DECL
1492 || TREE_CODE (TREE_OPERAND (cond
, 0)) == PARM_DECL
)
1493 && TREE_CONSTANT (TREE_OPERAND (cond
, 1)))
1495 tree stmt
= (TREE_CODE (cond
) == EQ_EXPR
1496 ? then_stmt
: else_stmt
);
1497 tree
*location
= (TREE_CODE (cond
) == EQ_EXPR
1498 ? &COND_EXPR_THEN (*stmt_p
)
1499 : &COND_EXPR_ELSE (*stmt_p
));
1502 && TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
1503 && GIMPLE_STMT_OPERAND (stmt
, 0) == TREE_OPERAND (cond
, 0)
1504 && GIMPLE_STMT_OPERAND (stmt
, 1) == TREE_OPERAND (cond
, 1))
1505 *location
= alloc_stmt_list ();
1509 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1510 would be re-introduced during lowering. */
1511 data
->last_goto
= NULL
;
1516 remove_useless_stmts_tf (tree
*stmt_p
, struct rus_data
*data
)
1518 bool save_may_branch
, save_may_throw
;
1519 bool this_may_branch
, this_may_throw
;
1521 /* Collect may_branch and may_throw information for the body only. */
1522 save_may_branch
= data
->may_branch
;
1523 save_may_throw
= data
->may_throw
;
1524 data
->may_branch
= false;
1525 data
->may_throw
= false;
1526 data
->last_goto
= NULL
;
1528 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 0), data
);
1530 this_may_branch
= data
->may_branch
;
1531 this_may_throw
= data
->may_throw
;
1532 data
->may_branch
|= save_may_branch
;
1533 data
->may_throw
|= save_may_throw
;
1534 data
->last_goto
= NULL
;
1536 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 1), data
);
1538 /* If the body is empty, then we can emit the FINALLY block without
1539 the enclosing TRY_FINALLY_EXPR. */
1540 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p
, 0)))
1542 *stmt_p
= TREE_OPERAND (*stmt_p
, 1);
1543 data
->repeat
= true;
1546 /* If the handler is empty, then we can emit the TRY block without
1547 the enclosing TRY_FINALLY_EXPR. */
1548 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p
, 1)))
1550 *stmt_p
= TREE_OPERAND (*stmt_p
, 0);
1551 data
->repeat
= true;
1554 /* If the body neither throws, nor branches, then we can safely
1555 string the TRY and FINALLY blocks together. */
1556 else if (!this_may_branch
&& !this_may_throw
)
1558 tree stmt
= *stmt_p
;
1559 *stmt_p
= TREE_OPERAND (stmt
, 0);
1560 append_to_statement_list (TREE_OPERAND (stmt
, 1), stmt_p
);
1561 data
->repeat
= true;
1567 remove_useless_stmts_tc (tree
*stmt_p
, struct rus_data
*data
)
1569 bool save_may_throw
, this_may_throw
;
1570 tree_stmt_iterator i
;
1573 /* Collect may_throw information for the body only. */
1574 save_may_throw
= data
->may_throw
;
1575 data
->may_throw
= false;
1576 data
->last_goto
= NULL
;
1578 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 0), data
);
1580 this_may_throw
= data
->may_throw
;
1581 data
->may_throw
= save_may_throw
;
1583 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1584 if (!this_may_throw
)
1586 if (warn_notreached
)
1587 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p
, 1));
1588 *stmt_p
= TREE_OPERAND (*stmt_p
, 0);
1589 data
->repeat
= true;
1593 /* Process the catch clause specially. We may be able to tell that
1594 no exceptions propagate past this point. */
1596 this_may_throw
= true;
1597 i
= tsi_start (TREE_OPERAND (*stmt_p
, 1));
1598 stmt
= tsi_stmt (i
);
1599 data
->last_goto
= NULL
;
1601 switch (TREE_CODE (stmt
))
1604 for (; !tsi_end_p (i
); tsi_next (&i
))
1606 stmt
= tsi_stmt (i
);
1607 /* If we catch all exceptions, then the body does not
1608 propagate exceptions past this point. */
1609 if (CATCH_TYPES (stmt
) == NULL
)
1610 this_may_throw
= false;
1611 data
->last_goto
= NULL
;
1612 remove_useless_stmts_1 (&CATCH_BODY (stmt
), data
);
1616 case EH_FILTER_EXPR
:
1617 if (EH_FILTER_MUST_NOT_THROW (stmt
))
1618 this_may_throw
= false;
1619 else if (EH_FILTER_TYPES (stmt
) == NULL
)
1620 this_may_throw
= false;
1621 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt
), data
);
1625 /* Otherwise this is a cleanup. */
1626 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 1), data
);
1628 /* If the cleanup is empty, then we can emit the TRY block without
1629 the enclosing TRY_CATCH_EXPR. */
1630 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p
, 1)))
1632 *stmt_p
= TREE_OPERAND (*stmt_p
, 0);
1633 data
->repeat
= true;
1637 data
->may_throw
|= this_may_throw
;
1642 remove_useless_stmts_bind (tree
*stmt_p
, struct rus_data
*data
)
1646 /* First remove anything underneath the BIND_EXPR. */
1647 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p
), data
);
1649 /* If the BIND_EXPR has no variables, then we can pull everything
1650 up one level and remove the BIND_EXPR, unless this is the toplevel
1651 BIND_EXPR for the current function or an inlined function.
1653 When this situation occurs we will want to apply this
1654 optimization again. */
1655 block
= BIND_EXPR_BLOCK (*stmt_p
);
1656 if (BIND_EXPR_VARS (*stmt_p
) == NULL_TREE
1657 && *stmt_p
!= DECL_SAVED_TREE (current_function_decl
)
1659 || ! BLOCK_ABSTRACT_ORIGIN (block
)
1660 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block
))
1663 *stmt_p
= BIND_EXPR_BODY (*stmt_p
);
1664 data
->repeat
= true;
1670 remove_useless_stmts_goto (tree
*stmt_p
, struct rus_data
*data
)
1672 tree dest
= GOTO_DESTINATION (*stmt_p
);
1674 data
->may_branch
= true;
1675 data
->last_goto
= NULL
;
1677 /* Record the last goto expr, so that we can delete it if unnecessary. */
1678 if (TREE_CODE (dest
) == LABEL_DECL
)
1679 data
->last_goto
= stmt_p
;
1684 remove_useless_stmts_label (tree
*stmt_p
, struct rus_data
*data
)
1686 tree label
= LABEL_EXPR_LABEL (*stmt_p
);
1688 data
->has_label
= true;
1690 /* We do want to jump across non-local label receiver code. */
1691 if (DECL_NONLOCAL (label
))
1692 data
->last_goto
= NULL
;
1694 else if (data
->last_goto
&& GOTO_DESTINATION (*data
->last_goto
) == label
)
1696 *data
->last_goto
= build_empty_stmt ();
1697 data
->repeat
= true;
1700 /* ??? Add something here to delete unused labels. */
1704 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1705 decl. This allows us to eliminate redundant or useless
1706 calls to "const" functions.
1708 Gimplifier already does the same operation, but we may notice functions
1709 being const and pure once their calls has been gimplified, so we need
1710 to update the flag. */
1713 update_call_expr_flags (tree call
)
1715 tree decl
= get_callee_fndecl (call
);
1718 if (call_expr_flags (call
) & (ECF_CONST
| ECF_PURE
))
1719 TREE_SIDE_EFFECTS (call
) = 0;
1720 if (TREE_NOTHROW (decl
))
1721 TREE_NOTHROW (call
) = 1;
1725 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1728 notice_special_calls (tree t
)
1730 int flags
= call_expr_flags (t
);
1732 if (flags
& ECF_MAY_BE_ALLOCA
)
1733 current_function_calls_alloca
= true;
1734 if (flags
& ECF_RETURNS_TWICE
)
1735 current_function_calls_setjmp
= true;
1739 /* Clear flags set by notice_special_calls. Used by dead code removal
1740 to update the flags. */
1743 clear_special_calls (void)
1745 current_function_calls_alloca
= false;
1746 current_function_calls_setjmp
= false;
1751 remove_useless_stmts_1 (tree
*tp
, struct rus_data
*data
)
1755 switch (TREE_CODE (t
))
1758 remove_useless_stmts_cond (tp
, data
);
1761 case TRY_FINALLY_EXPR
:
1762 remove_useless_stmts_tf (tp
, data
);
1765 case TRY_CATCH_EXPR
:
1766 remove_useless_stmts_tc (tp
, data
);
1770 remove_useless_stmts_bind (tp
, data
);
1774 remove_useless_stmts_goto (tp
, data
);
1778 remove_useless_stmts_label (tp
, data
);
1783 data
->last_goto
= NULL
;
1784 data
->may_branch
= true;
1789 data
->last_goto
= NULL
;
1790 notice_special_calls (t
);
1791 update_call_expr_flags (t
);
1792 if (tree_could_throw_p (t
))
1793 data
->may_throw
= true;
1799 case GIMPLE_MODIFY_STMT
:
1800 data
->last_goto
= NULL
;
1802 op
= get_call_expr_in (t
);
1805 update_call_expr_flags (op
);
1806 notice_special_calls (op
);
1808 if (tree_could_throw_p (t
))
1809 data
->may_throw
= true;
1812 case STATEMENT_LIST
:
1814 tree_stmt_iterator i
= tsi_start (t
);
1815 while (!tsi_end_p (i
))
1818 if (IS_EMPTY_STMT (t
))
1824 remove_useless_stmts_1 (tsi_stmt_ptr (i
), data
);
1827 if (TREE_CODE (t
) == STATEMENT_LIST
)
1829 tsi_link_before (&i
, t
, TSI_SAME_STMT
);
1839 data
->last_goto
= NULL
;
1843 data
->last_goto
= NULL
;
1849 remove_useless_stmts (void)
1851 struct rus_data data
;
1853 clear_special_calls ();
1857 memset (&data
, 0, sizeof (data
));
1858 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl
), &data
);
1860 while (data
.repeat
);
1865 struct tree_opt_pass pass_remove_useless_stmts
=
1867 "useless", /* name */
1869 remove_useless_stmts
, /* execute */
1872 0, /* static_pass_number */
1874 PROP_gimple_any
, /* properties_required */
1875 0, /* properties_provided */
1876 0, /* properties_destroyed */
1877 0, /* todo_flags_start */
1878 TODO_dump_func
, /* todo_flags_finish */
1882 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1885 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
1889 /* Since this block is no longer reachable, we can just delete all
1890 of its PHI nodes. */
1891 phi
= phi_nodes (bb
);
1894 tree next
= PHI_CHAIN (phi
);
1895 remove_phi_node (phi
, NULL_TREE
, true);
1899 /* Remove edges to BB's successors. */
1900 while (EDGE_COUNT (bb
->succs
) > 0)
1901 remove_edge (EDGE_SUCC (bb
, 0));
1905 /* Remove statements of basic block BB. */
1908 remove_bb (basic_block bb
)
1910 block_stmt_iterator i
;
1911 #ifdef USE_MAPPED_LOCATION
1912 source_location loc
= UNKNOWN_LOCATION
;
1914 source_locus loc
= 0;
1919 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
1920 if (dump_flags
& TDF_DETAILS
)
1922 dump_bb (bb
, dump_file
, 0);
1923 fprintf (dump_file
, "\n");
1929 struct loop
*loop
= bb
->loop_father
;
1931 /* If a loop gets removed, clean up the information associated
1933 if (loop
->latch
== bb
1934 || loop
->header
== bb
)
1935 free_numbers_of_iterations_estimates_loop (loop
);
1938 /* Remove all the instructions in the block. */
1939 for (i
= bsi_start (bb
); !bsi_end_p (i
);)
1941 tree stmt
= bsi_stmt (i
);
1942 if (TREE_CODE (stmt
) == LABEL_EXPR
1943 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt
))
1944 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
))))
1947 block_stmt_iterator new_bsi
;
1949 /* A non-reachable non-local label may still be referenced.
1950 But it no longer needs to carry the extra semantics of
1952 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
)))
1954 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
)) = 0;
1955 FORCED_LABEL (LABEL_EXPR_LABEL (stmt
)) = 1;
1958 new_bb
= bb
->prev_bb
;
1959 new_bsi
= bsi_start (new_bb
);
1960 bsi_remove (&i
, false);
1961 bsi_insert_before (&new_bsi
, stmt
, BSI_NEW_STMT
);
1965 /* Release SSA definitions if we are in SSA. Note that we
1966 may be called when not in SSA. For example,
1967 final_cleanup calls this function via
1968 cleanup_tree_cfg. */
1969 if (gimple_in_ssa_p (cfun
))
1970 release_defs (stmt
);
1972 bsi_remove (&i
, true);
1975 /* Don't warn for removed gotos. Gotos are often removed due to
1976 jump threading, thus resulting in bogus warnings. Not great,
1977 since this way we lose warnings for gotos in the original
1978 program that are indeed unreachable. */
1979 if (TREE_CODE (stmt
) != GOTO_EXPR
&& EXPR_HAS_LOCATION (stmt
) && !loc
)
1981 #ifdef USE_MAPPED_LOCATION
1982 if (EXPR_HAS_LOCATION (stmt
))
1983 loc
= EXPR_LOCATION (stmt
);
1986 t
= EXPR_LOCUS (stmt
);
1987 if (t
&& LOCATION_LINE (*t
) > 0)
1993 /* If requested, give a warning that the first statement in the
1994 block is unreachable. We walk statements backwards in the
1995 loop above, so the last statement we process is the first statement
1997 #ifdef USE_MAPPED_LOCATION
1998 if (loc
> BUILTINS_LOCATION
)
1999 warning (OPT_Wunreachable_code
, "%Hwill never be executed", &loc
);
2002 warning (OPT_Wunreachable_code
, "%Hwill never be executed", loc
);
2005 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2009 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2010 predicate VAL, return the edge that will be taken out of the block.
2011 If VAL does not match a unique edge, NULL is returned. */
2014 find_taken_edge (basic_block bb
, tree val
)
2018 stmt
= last_stmt (bb
);
2021 gcc_assert (is_ctrl_stmt (stmt
));
2024 if (! is_gimple_min_invariant (val
))
2027 if (TREE_CODE (stmt
) == COND_EXPR
)
2028 return find_taken_edge_cond_expr (bb
, val
);
2030 if (TREE_CODE (stmt
) == SWITCH_EXPR
)
2031 return find_taken_edge_switch_expr (bb
, val
);
2033 if (computed_goto_p (stmt
))
2034 return find_taken_edge_computed_goto (bb
, TREE_OPERAND( val
, 0));
2039 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2040 statement, determine which of the outgoing edges will be taken out of the
2041 block. Return NULL if either edge may be taken. */
2044 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2049 dest
= label_to_block (val
);
2052 e
= find_edge (bb
, dest
);
2053 gcc_assert (e
!= NULL
);
2059 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2060 statement, determine which of the two edges will be taken out of the
2061 block. Return NULL if either edge may be taken. */
2064 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2066 edge true_edge
, false_edge
;
2068 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2070 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2071 return (integer_zerop (val
) ? false_edge
: true_edge
);
2074 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2075 statement, determine which edge will be taken out of the block. Return
2076 NULL if any edge may be taken. */
2079 find_taken_edge_switch_expr (basic_block bb
, tree val
)
2081 tree switch_expr
, taken_case
;
2082 basic_block dest_bb
;
2085 switch_expr
= last_stmt (bb
);
2086 taken_case
= find_case_label_for_value (switch_expr
, val
);
2087 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2089 e
= find_edge (bb
, dest_bb
);
2095 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2096 We can make optimal use here of the fact that the case labels are
2097 sorted: We can do a binary search for a case matching VAL. */
2100 find_case_label_for_value (tree switch_expr
, tree val
)
2102 tree vec
= SWITCH_LABELS (switch_expr
);
2103 size_t low
, high
, n
= TREE_VEC_LENGTH (vec
);
2104 tree default_case
= TREE_VEC_ELT (vec
, n
- 1);
2106 for (low
= -1, high
= n
- 1; high
- low
> 1; )
2108 size_t i
= (high
+ low
) / 2;
2109 tree t
= TREE_VEC_ELT (vec
, i
);
2112 /* Cache the result of comparing CASE_LOW and val. */
2113 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2120 if (CASE_HIGH (t
) == NULL
)
2122 /* A singe-valued case label. */
2128 /* A case range. We can only handle integer ranges. */
2129 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2134 return default_case
;
2140 /*---------------------------------------------------------------------------
2142 ---------------------------------------------------------------------------*/
2144 /* Dump tree-specific information of block BB to file OUTF. */
2147 tree_dump_bb (basic_block bb
, FILE *outf
, int indent
)
2149 dump_generic_bb (outf
, bb
, indent
, TDF_VOPS
|TDF_MEMSYMS
);
2153 /* Dump a basic block on stderr. */
2156 debug_tree_bb (basic_block bb
)
2158 dump_bb (bb
, stderr
, 0);
2162 /* Dump basic block with index N on stderr. */
2165 debug_tree_bb_n (int n
)
2167 debug_tree_bb (BASIC_BLOCK (n
));
2168 return BASIC_BLOCK (n
);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in tree-pass.h).  */

void
debug_tree_cfg (int flags)
{
  dump_tree_cfg (stderr, flags);
}
2184 /* Dump the program showing basic block boundaries on the given FILE.
2186 FLAGS are the same used by the tree dumping functions (see TDF_* in
2190 dump_tree_cfg (FILE *file
, int flags
)
2192 if (flags
& TDF_DETAILS
)
2194 const char *funcname
2195 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
2198 fprintf (file
, ";; Function %s\n\n", funcname
);
2199 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2200 n_basic_blocks
, n_edges
, last_basic_block
);
2202 brief_dump_cfg (file
);
2203 fprintf (file
, "\n");
2206 if (flags
& TDF_STATS
)
2207 dump_cfg_stats (file
);
2209 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2213 /* Dump CFG statistics on FILE. */
2216 dump_cfg_stats (FILE *file
)
2218 static long max_num_merged_labels
= 0;
2219 unsigned long size
, total
= 0;
2222 const char * const fmt_str
= "%-30s%-13s%12s\n";
2223 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2224 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2225 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2226 const char *funcname
2227 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
2230 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2232 fprintf (file
, "---------------------------------------------------------\n");
2233 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2234 fprintf (file
, fmt_str
, "", " instances ", "used ");
2235 fprintf (file
, "---------------------------------------------------------\n");
2237 size
= n_basic_blocks
* sizeof (struct basic_block_def
);
2239 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks
,
2240 SCALE (size
), LABEL (size
));
2244 num_edges
+= EDGE_COUNT (bb
->succs
);
2245 size
= num_edges
* sizeof (struct edge_def
);
2247 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2249 fprintf (file
, "---------------------------------------------------------\n");
2250 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2252 fprintf (file
, "---------------------------------------------------------\n");
2253 fprintf (file
, "\n");
2255 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2256 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2258 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2259 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2261 fprintf (file
, "\n");
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2275 /* Dump the flowgraph to a .vcg FILE. */
2278 tree_cfg2vcg (FILE *file
)
2283 const char *funcname
2284 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
2286 /* Write the file header. */
2287 fprintf (file
, "graph: { title: \"%s\"\n", funcname
);
2288 fprintf (file
, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2289 fprintf (file
, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2291 /* Write blocks and edges. */
2292 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR
->succs
)
2294 fprintf (file
, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2297 if (e
->flags
& EDGE_FAKE
)
2298 fprintf (file
, " linestyle: dotted priority: 10");
2300 fprintf (file
, " linestyle: solid priority: 100");
2302 fprintf (file
, " }\n");
2308 enum tree_code head_code
, end_code
;
2309 const char *head_name
, *end_name
;
2312 tree first
= first_stmt (bb
);
2313 tree last
= last_stmt (bb
);
2317 head_code
= TREE_CODE (first
);
2318 head_name
= tree_code_name
[head_code
];
2319 head_line
= get_lineno (first
);
2322 head_name
= "no-statement";
2326 end_code
= TREE_CODE (last
);
2327 end_name
= tree_code_name
[end_code
];
2328 end_line
= get_lineno (last
);
2331 end_name
= "no-statement";
2333 fprintf (file
, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2334 bb
->index
, bb
->index
, head_name
, head_line
, end_name
,
2337 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2339 if (e
->dest
== EXIT_BLOCK_PTR
)
2340 fprintf (file
, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb
->index
);
2342 fprintf (file
, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb
->index
, e
->dest
->index
);
2344 if (e
->flags
& EDGE_FAKE
)
2345 fprintf (file
, " priority: 10 linestyle: dotted");
2347 fprintf (file
, " priority: 100 linestyle: solid");
2349 fprintf (file
, " }\n");
2352 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
2356 fputs ("}\n\n", file
);
2361 /*---------------------------------------------------------------------------
2362 Miscellaneous helpers
2363 ---------------------------------------------------------------------------*/
2365 /* Return true if T represents a stmt that always transfers control. */
2368 is_ctrl_stmt (tree t
)
2370 return (TREE_CODE (t
) == COND_EXPR
2371 || TREE_CODE (t
) == SWITCH_EXPR
2372 || TREE_CODE (t
) == GOTO_EXPR
2373 || TREE_CODE (t
) == RETURN_EXPR
2374 || TREE_CODE (t
) == RESX_EXPR
);
2378 /* Return true if T is a statement that may alter the flow of control
2379 (e.g., a call to a non-returning function). */
2382 is_ctrl_altering_stmt (tree t
)
2387 call
= get_call_expr_in (t
);
2390 /* A non-pure/const CALL_EXPR alters flow control if the current
2391 function has nonlocal labels. */
2392 if (TREE_SIDE_EFFECTS (call
) && current_function_has_nonlocal_label
)
2395 /* A CALL_EXPR also alters control flow if it does not return. */
2396 if (call_expr_flags (call
) & ECF_NORETURN
)
2400 /* OpenMP directives alter control flow. */
2401 if (OMP_DIRECTIVE_P (t
))
2404 /* If a statement can throw, it alters control flow. */
2405 return tree_can_throw_internal (t
);
2409 /* Return true if T is a computed goto. */
2412 computed_goto_p (tree t
)
2414 return (TREE_CODE (t
) == GOTO_EXPR
2415 && TREE_CODE (GOTO_DESTINATION (t
)) != LABEL_DECL
);
2419 /* Return true if T is a simple local goto. */
2422 simple_goto_p (tree t
)
2424 return (TREE_CODE (t
) == GOTO_EXPR
2425 && TREE_CODE (GOTO_DESTINATION (t
)) == LABEL_DECL
);
2429 /* Return true if T can make an abnormal transfer of control flow.
2430 Transfers of control flow associated with EH are excluded. */
2433 tree_can_make_abnormal_goto (tree t
)
2435 if (computed_goto_p (t
))
2437 if (TREE_CODE (t
) == GIMPLE_MODIFY_STMT
)
2438 t
= GIMPLE_STMT_OPERAND (t
, 1);
2439 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
2440 t
= TREE_OPERAND (t
, 0);
2441 if (TREE_CODE (t
) == CALL_EXPR
)
2442 return TREE_SIDE_EFFECTS (t
) && current_function_has_nonlocal_label
;
2447 /* Return true if T should start a new basic block. PREV_T is the
2448 statement preceding T. It is used when T is a label or a case label.
2449 Labels should only start a new basic block if their previous statement
2450 wasn't a label. Otherwise, sequence of labels would generate
2451 unnecessary basic blocks that only contain a single label. */
2454 stmt_starts_bb_p (tree t
, tree prev_t
)
2459 /* LABEL_EXPRs start a new basic block only if the preceding
2460 statement wasn't a label of the same type. This prevents the
2461 creation of consecutive blocks that have nothing but a single
2463 if (TREE_CODE (t
) == LABEL_EXPR
)
2465 /* Nonlocal and computed GOTO targets always start a new block. */
2466 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t
))
2467 || FORCED_LABEL (LABEL_EXPR_LABEL (t
)))
2470 if (prev_t
&& TREE_CODE (prev_t
) == LABEL_EXPR
)
2472 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t
)))
2475 cfg_stats
.num_merged_labels
++;
2486 /* Return true if T should end a basic block. */
2489 stmt_ends_bb_p (tree t
)
2491 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2495 /* Add gotos that used to be represented implicitly in the CFG. */
2498 disband_implicit_edges (void)
2501 block_stmt_iterator last
;
2508 last
= bsi_last (bb
);
2509 stmt
= last_stmt (bb
);
2511 if (stmt
&& TREE_CODE (stmt
) == COND_EXPR
)
2513 /* Remove superfluous gotos from COND_EXPR branches. Moved
2514 from cfg_remove_useless_stmts here since it violates the
2515 invariants for tree--cfg correspondence and thus fits better
2516 here where we do it anyway. */
2517 e
= find_edge (bb
, bb
->next_bb
);
2520 if (e
->flags
& EDGE_TRUE_VALUE
)
2521 COND_EXPR_THEN (stmt
) = build_empty_stmt ();
2522 else if (e
->flags
& EDGE_FALSE_VALUE
)
2523 COND_EXPR_ELSE (stmt
) = build_empty_stmt ();
2526 e
->flags
|= EDGE_FALLTHRU
;
2532 if (stmt
&& TREE_CODE (stmt
) == RETURN_EXPR
)
2534 /* Remove the RETURN_EXPR if we may fall though to the exit
2536 gcc_assert (single_succ_p (bb
));
2537 gcc_assert (single_succ (bb
) == EXIT_BLOCK_PTR
);
2539 if (bb
->next_bb
== EXIT_BLOCK_PTR
2540 && !TREE_OPERAND (stmt
, 0))
2542 bsi_remove (&last
, true);
2543 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
2548 /* There can be no fallthru edge if the last statement is a control
2550 if (stmt
&& is_ctrl_stmt (stmt
))
2553 /* Find a fallthru edge and emit the goto if necessary. */
2554 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2555 if (e
->flags
& EDGE_FALLTHRU
)
2558 if (!e
|| e
->dest
== bb
->next_bb
)
2561 gcc_assert (e
->dest
!= EXIT_BLOCK_PTR
);
2562 label
= tree_block_label (e
->dest
);
2564 stmt
= build1 (GOTO_EXPR
, void_type_node
, label
);
2565 #ifdef USE_MAPPED_LOCATION
2566 SET_EXPR_LOCATION (stmt
, e
->goto_locus
);
2568 SET_EXPR_LOCUS (stmt
, e
->goto_locus
);
2570 bsi_insert_after (&last
, stmt
, BSI_NEW_STMT
);
2571 e
->flags
&= ~EDGE_FALLTHRU
;
2575 /* Remove block annotations and other datastructures. */
2578 delete_tree_cfg_annotations (void)
2581 block_stmt_iterator bsi
;
2583 /* Remove annotations from every tree in the function. */
2585 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
2587 tree stmt
= bsi_stmt (bsi
);
2588 ggc_free (stmt
->base
.ann
);
2589 stmt
->base
.ann
= NULL
;
2591 label_to_block_map
= NULL
;
2595 /* Return the first statement in basic block BB. */
2598 first_stmt (basic_block bb
)
2600 block_stmt_iterator i
= bsi_start (bb
);
2601 return !bsi_end_p (i
) ? bsi_stmt (i
) : NULL_TREE
;
2605 /* Return the last statement in basic block BB. */
2608 last_stmt (basic_block bb
)
2610 block_stmt_iterator b
= bsi_last (bb
);
2611 return !bsi_end_p (b
) ? bsi_stmt (b
) : NULL_TREE
;
2615 /* Return the last statement of an otherwise empty block. Return NULL
2616 if the block is totally empty, or if it contains more than one
2620 last_and_only_stmt (basic_block bb
)
2622 block_stmt_iterator i
= bsi_last (bb
);
2628 last
= bsi_stmt (i
);
2633 /* Empty statements should no longer appear in the instruction stream.
2634 Everything that might have appeared before should be deleted by
2635 remove_useless_stmts, and the optimizers should just bsi_remove
2636 instead of smashing with build_empty_stmt.
2638 Thus the only thing that should appear here in a block containing
2639 one executable statement is a label. */
2640 prev
= bsi_stmt (i
);
2641 if (TREE_CODE (prev
) == LABEL_EXPR
)
2648 /* Mark BB as the basic block holding statement T. */
2651 set_bb_for_stmt (tree t
, basic_block bb
)
2653 if (TREE_CODE (t
) == PHI_NODE
)
2655 else if (TREE_CODE (t
) == STATEMENT_LIST
)
2657 tree_stmt_iterator i
;
2658 for (i
= tsi_start (t
); !tsi_end_p (i
); tsi_next (&i
))
2659 set_bb_for_stmt (tsi_stmt (i
), bb
);
2663 stmt_ann_t ann
= get_stmt_ann (t
);
2666 /* If the statement is a label, add the label to block-to-labels map
2667 so that we can speed up edge creation for GOTO_EXPRs. */
2668 if (TREE_CODE (t
) == LABEL_EXPR
)
2672 t
= LABEL_EXPR_LABEL (t
);
2673 uid
= LABEL_DECL_UID (t
);
2676 unsigned old_len
= VEC_length (basic_block
, label_to_block_map
);
2677 LABEL_DECL_UID (t
) = uid
= cfun
->last_label_uid
++;
2678 if (old_len
<= (unsigned) uid
)
2680 unsigned new_len
= 3 * uid
/ 2;
2682 VEC_safe_grow_cleared (basic_block
, gc
, label_to_block_map
,
2687 /* We're moving an existing label. Make sure that we've
2688 removed it from the old block. */
2690 || !VEC_index (basic_block
, label_to_block_map
, uid
));
2691 VEC_replace (basic_block
, label_to_block_map
, uid
, bb
);
2696 /* Faster version of set_bb_for_stmt that assume that statement is being moved
2697 from one basic block to another.
2698 For BB splitting we can run into quadratic case, so performance is quite
2699 important and knowing that the tables are big enough, change_bb_for_stmt
2700 can inline as leaf function. */
2702 change_bb_for_stmt (tree t
, basic_block bb
)
2704 get_stmt_ann (t
)->bb
= bb
;
2705 if (TREE_CODE (t
) == LABEL_EXPR
)
2706 VEC_replace (basic_block
, label_to_block_map
,
2707 LABEL_DECL_UID (LABEL_EXPR_LABEL (t
)), bb
);
2710 /* Finds iterator for STMT. */
2712 extern block_stmt_iterator
2713 bsi_for_stmt (tree stmt
)
2715 block_stmt_iterator bsi
;
2717 for (bsi
= bsi_start (bb_for_stmt (stmt
)); !bsi_end_p (bsi
); bsi_next (&bsi
))
2718 if (bsi_stmt (bsi
) == stmt
)
2724 /* Mark statement T as modified, and update it. */
2726 update_modified_stmts (tree t
)
2728 if (!ssa_operands_active ())
2730 if (TREE_CODE (t
) == STATEMENT_LIST
)
2732 tree_stmt_iterator i
;
2734 for (i
= tsi_start (t
); !tsi_end_p (i
); tsi_next (&i
))
2736 stmt
= tsi_stmt (i
);
2737 update_stmt_if_modified (stmt
);
2741 update_stmt_if_modified (t
);
2744 /* Insert statement (or statement list) T before the statement
2745 pointed-to by iterator I. M specifies how to update iterator I
2746 after insertion (see enum bsi_iterator_update). */
2749 bsi_insert_before (block_stmt_iterator
*i
, tree t
, enum bsi_iterator_update m
)
2751 set_bb_for_stmt (t
, i
->bb
);
2752 update_modified_stmts (t
);
2753 tsi_link_before (&i
->tsi
, t
, m
);
2757 /* Insert statement (or statement list) T after the statement
2758 pointed-to by iterator I. M specifies how to update iterator I
2759 after insertion (see enum bsi_iterator_update). */
2762 bsi_insert_after (block_stmt_iterator
*i
, tree t
, enum bsi_iterator_update m
)
2764 set_bb_for_stmt (t
, i
->bb
);
2765 update_modified_stmts (t
);
2766 tsi_link_after (&i
->tsi
, t
, m
);
2770 /* Remove the statement pointed to by iterator I. The iterator is updated
2771 to the next statement.
2773 When REMOVE_EH_INFO is true we remove the statement pointed to by
2774 iterator I from the EH tables. Otherwise we do not modify the EH
2777 Generally, REMOVE_EH_INFO should be true when the statement is going to
2778 be removed from the IL and not reinserted elsewhere. */
2781 bsi_remove (block_stmt_iterator
*i
, bool remove_eh_info
)
2783 tree t
= bsi_stmt (*i
);
2784 set_bb_for_stmt (t
, NULL
);
2785 delink_stmt_imm_use (t
);
2786 tsi_delink (&i
->tsi
);
2787 mark_stmt_modified (t
);
2790 remove_stmt_from_eh_region (t
);
2791 gimple_remove_stmt_histograms (cfun
, t
);
2796 /* Move the statement at FROM so it comes right after the statement at TO. */
2799 bsi_move_after (block_stmt_iterator
*from
, block_stmt_iterator
*to
)
2801 tree stmt
= bsi_stmt (*from
);
2802 bsi_remove (from
, false);
2803 bsi_insert_after (to
, stmt
, BSI_SAME_STMT
);
2807 /* Move the statement at FROM so it comes right before the statement at TO. */
2810 bsi_move_before (block_stmt_iterator
*from
, block_stmt_iterator
*to
)
2812 tree stmt
= bsi_stmt (*from
);
2813 bsi_remove (from
, false);
2814 bsi_insert_before (to
, stmt
, BSI_SAME_STMT
);
2818 /* Move the statement at FROM to the end of basic block BB. */
2821 bsi_move_to_bb_end (block_stmt_iterator
*from
, basic_block bb
)
2823 block_stmt_iterator last
= bsi_last (bb
);
2825 /* Have to check bsi_end_p because it could be an empty block. */
2826 if (!bsi_end_p (last
) && is_ctrl_stmt (bsi_stmt (last
)))
2827 bsi_move_before (from
, &last
);
2829 bsi_move_after (from
, &last
);
2833 /* Replace the contents of the statement pointed to by iterator BSI
2834 with STMT. If UPDATE_EH_INFO is true, the exception handling
2835 information of the original statement is moved to the new statement. */
2838 bsi_replace (const block_stmt_iterator
*bsi
, tree stmt
, bool update_eh_info
)
2841 tree orig_stmt
= bsi_stmt (*bsi
);
2843 if (stmt
== orig_stmt
)
2845 SET_EXPR_LOCUS (stmt
, EXPR_LOCUS (orig_stmt
));
2846 set_bb_for_stmt (stmt
, bsi
->bb
);
2848 /* Preserve EH region information from the original statement, if
2849 requested by the caller. */
2852 eh_region
= lookup_stmt_eh_region (orig_stmt
);
2855 remove_stmt_from_eh_region (orig_stmt
);
2856 add_stmt_to_eh_region (stmt
, eh_region
);
2860 gimple_duplicate_stmt_histograms (cfun
, stmt
, cfun
, orig_stmt
);
2861 gimple_remove_stmt_histograms (cfun
, orig_stmt
);
2862 delink_stmt_imm_use (orig_stmt
);
2863 *bsi_stmt_ptr (*bsi
) = stmt
;
2864 mark_stmt_modified (stmt
);
2865 update_modified_stmts (stmt
);
2869 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2870 is made to place the statement in an existing basic block, but
2871 sometimes that isn't possible. When it isn't possible, the edge is
2872 split and the statement is added to the new block.
2874 In all cases, the returned *BSI points to the correct location. The
2875 return value is true if insertion should be done after the location,
2876 or false if it should be done before the location. If new basic block
2877 has to be created, it is stored in *NEW_BB. */
2880 tree_find_edge_insert_loc (edge e
, block_stmt_iterator
*bsi
,
2881 basic_block
*new_bb
)
2883 basic_block dest
, src
;
2889 /* If the destination has one predecessor which has no PHI nodes,
2890 insert there. Except for the exit block.
2892 The requirement for no PHI nodes could be relaxed. Basically we
2893 would have to examine the PHIs to prove that none of them used
2894 the value set by the statement we want to insert on E. That
2895 hardly seems worth the effort. */
2896 if (single_pred_p (dest
)
2897 && ! phi_nodes (dest
)
2898 && dest
!= EXIT_BLOCK_PTR
)
2900 *bsi
= bsi_start (dest
);
2901 if (bsi_end_p (*bsi
))
2904 /* Make sure we insert after any leading labels. */
2905 tmp
= bsi_stmt (*bsi
);
2906 while (TREE_CODE (tmp
) == LABEL_EXPR
)
2909 if (bsi_end_p (*bsi
))
2911 tmp
= bsi_stmt (*bsi
);
2914 if (bsi_end_p (*bsi
))
2916 *bsi
= bsi_last (dest
);
2923 /* If the source has one successor, the edge is not abnormal and
2924 the last statement does not end a basic block, insert there.
2925 Except for the entry block. */
2927 if ((e
->flags
& EDGE_ABNORMAL
) == 0
2928 && single_succ_p (src
)
2929 && src
!= ENTRY_BLOCK_PTR
)
2931 *bsi
= bsi_last (src
);
2932 if (bsi_end_p (*bsi
))
2935 tmp
= bsi_stmt (*bsi
);
2936 if (!stmt_ends_bb_p (tmp
))
2939 /* Insert code just before returning the value. We may need to decompose
2940 the return in the case it contains non-trivial operand. */
2941 if (TREE_CODE (tmp
) == RETURN_EXPR
)
2943 tree op
= TREE_OPERAND (tmp
, 0);
2944 if (op
&& !is_gimple_val (op
))
2946 gcc_assert (TREE_CODE (op
) == GIMPLE_MODIFY_STMT
);
2947 bsi_insert_before (bsi
, op
, BSI_NEW_STMT
);
2948 TREE_OPERAND (tmp
, 0) = GIMPLE_STMT_OPERAND (op
, 0);
2955 /* Otherwise, create a new basic block, and split this edge. */
2956 dest
= split_edge (e
);
2959 e
= single_pred_edge (dest
);
2964 /* This routine will commit all pending edge insertions, creating any new
2965 basic blocks which are necessary. */
2968 bsi_commit_edge_inserts (void)
2974 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR
), NULL
);
2977 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2978 bsi_commit_one_edge_insert (e
, NULL
);
2982 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
2983 to this block, otherwise set it to NULL. */
2986 bsi_commit_one_edge_insert (edge e
, basic_block
*new_bb
)
2990 if (PENDING_STMT (e
))
2992 block_stmt_iterator bsi
;
2993 tree stmt
= PENDING_STMT (e
);
2995 PENDING_STMT (e
) = NULL_TREE
;
2997 if (tree_find_edge_insert_loc (e
, &bsi
, new_bb
))
2998 bsi_insert_after (&bsi
, stmt
, BSI_NEW_STMT
);
3000 bsi_insert_before (&bsi
, stmt
, BSI_NEW_STMT
);
3005 /* Add STMT to the pending list of edge E. No actual insertion is
3006 made until a call to bsi_commit_edge_inserts () is made. */
3009 bsi_insert_on_edge (edge e
, tree stmt
)
3011 append_to_statement_list (stmt
, &PENDING_STMT (e
));
3014 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3015 block has to be created, it is returned. */
3018 bsi_insert_on_edge_immediate (edge e
, tree stmt
)
3020 block_stmt_iterator bsi
;
3021 basic_block new_bb
= NULL
;
3023 gcc_assert (!PENDING_STMT (e
));
3025 if (tree_find_edge_insert_loc (e
, &bsi
, &new_bb
))
3026 bsi_insert_after (&bsi
, stmt
, BSI_NEW_STMT
);
3028 bsi_insert_before (&bsi
, stmt
, BSI_NEW_STMT
);
3033 /*---------------------------------------------------------------------------
3034 Tree specific functions for CFG manipulation
3035 ---------------------------------------------------------------------------*/
3037 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3040 reinstall_phi_args (edge new_edge
, edge old_edge
)
3044 if (!PENDING_STMT (old_edge
))
3047 for (var
= PENDING_STMT (old_edge
), phi
= phi_nodes (new_edge
->dest
);
3049 var
= TREE_CHAIN (var
), phi
= PHI_CHAIN (phi
))
3051 tree result
= TREE_PURPOSE (var
);
3052 tree arg
= TREE_VALUE (var
);
3054 gcc_assert (result
== PHI_RESULT (phi
));
3056 add_phi_arg (phi
, arg
, new_edge
);
3059 PENDING_STMT (old_edge
) = NULL
;
3062 /* Returns the basic block after which the new basic block created
3063 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3064 near its "logical" location. This is of most help to humans looking
3065 at debugging dumps. */
3068 split_edge_bb_loc (edge edge_in
)
3070 basic_block dest
= edge_in
->dest
;
3072 if (dest
->prev_bb
&& find_edge (dest
->prev_bb
, dest
))
3073 return edge_in
->src
;
3075 return dest
->prev_bb
;
3078 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3079 Abort on abnormal edges. */
3082 tree_split_edge (edge edge_in
)
3084 basic_block new_bb
, after_bb
, dest
;
3087 /* Abnormal edges cannot be split. */
3088 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
3090 dest
= edge_in
->dest
;
3092 after_bb
= split_edge_bb_loc (edge_in
);
3094 new_bb
= create_empty_bb (after_bb
);
3095 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
3096 new_bb
->count
= edge_in
->count
;
3097 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
3098 new_edge
->probability
= REG_BR_PROB_BASE
;
3099 new_edge
->count
= edge_in
->count
;
3101 e
= redirect_edge_and_branch (edge_in
, new_bb
);
3103 reinstall_phi_args (new_edge
, e
);
3109 /* Return true when BB has label LABEL in it. */
3112 has_label_p (basic_block bb
, tree label
)
3114 block_stmt_iterator bsi
;
3116 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
3118 tree stmt
= bsi_stmt (bsi
);
3120 if (TREE_CODE (stmt
) != LABEL_EXPR
)
3122 if (LABEL_EXPR_LABEL (stmt
) == label
)
3129 /* Callback for walk_tree, check that all elements with address taken are
3130 properly noticed as such. The DATA is an int* that is 1 if TP was seen
3131 inside a PHI node. */
3134 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
3137 bool in_phi
= (data
!= NULL
);
3142 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3143 #define CHECK_OP(N, MSG) \
3144 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3145 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3147 switch (TREE_CODE (t
))
3150 if (SSA_NAME_IN_FREE_LIST (t
))
3152 error ("SSA name in freelist but still referenced");
3158 x
= fold (ASSERT_EXPR_COND (t
));
3159 if (x
== boolean_false_node
)
3161 error ("ASSERT_EXPR with an always-false condition");
3169 case GIMPLE_MODIFY_STMT
:
3170 x
= GIMPLE_STMT_OPERAND (t
, 0);
3171 if (TREE_CODE (x
) == BIT_FIELD_REF
3172 && is_gimple_reg (TREE_OPERAND (x
, 0)))
3174 error ("GIMPLE register modified with BIT_FIELD_REF");
3183 bool old_side_effects
;
3186 bool new_side_effects
;
3188 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3189 dead PHIs that take the address of something. But if the PHI
3190 result is dead, the fact that it takes the address of anything
3191 is irrelevant. Because we can not tell from here if a PHI result
3192 is dead, we just skip this check for PHIs altogether. This means
3193 we may be missing "valid" checks, but what can you do?
3194 This was PR19217. */
3198 old_invariant
= TREE_INVARIANT (t
);
3199 old_constant
= TREE_CONSTANT (t
);
3200 old_side_effects
= TREE_SIDE_EFFECTS (t
);
3202 recompute_tree_invariant_for_addr_expr (t
);
3203 new_invariant
= TREE_INVARIANT (t
);
3204 new_side_effects
= TREE_SIDE_EFFECTS (t
);
3205 new_constant
= TREE_CONSTANT (t
);
3207 if (old_invariant
!= new_invariant
)
3209 error ("invariant not recomputed when ADDR_EXPR changed");
3213 if (old_constant
!= new_constant
)
3215 error ("constant not recomputed when ADDR_EXPR changed");
3218 if (old_side_effects
!= new_side_effects
)
3220 error ("side effects not recomputed when ADDR_EXPR changed");
3224 /* Skip any references (they will be checked when we recurse down the
3225 tree) and ensure that any variable used as a prefix is marked
3227 for (x
= TREE_OPERAND (t
, 0);
3228 handled_component_p (x
);
3229 x
= TREE_OPERAND (x
, 0))
3232 if (TREE_CODE (x
) != VAR_DECL
&& TREE_CODE (x
) != PARM_DECL
)
3234 if (!TREE_ADDRESSABLE (x
))
3236 error ("address taken, but ADDRESSABLE bit not set");
3243 x
= COND_EXPR_COND (t
);
3244 if (TREE_CODE (TREE_TYPE (x
)) != BOOLEAN_TYPE
)
3246 error ("non-boolean used in condition");
3249 if (!is_gimple_condexpr (x
))
3251 error ("invalid conditional operand");
3258 case FIX_TRUNC_EXPR
:
3263 case NON_LVALUE_EXPR
:
3264 case TRUTH_NOT_EXPR
:
3265 CHECK_OP (0, "invalid operand to unary operator");
3272 case ARRAY_RANGE_REF
:
3274 case VIEW_CONVERT_EXPR
:
3275 /* We have a nest of references. Verify that each of the operands
3276 that determine where to reference is either a constant or a variable,
3277 verify that the base is valid, and then show we've already checked
3279 while (handled_component_p (t
))
3281 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
3282 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3283 else if (TREE_CODE (t
) == ARRAY_REF
3284 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
3286 CHECK_OP (1, "invalid array index");
3287 if (TREE_OPERAND (t
, 2))
3288 CHECK_OP (2, "invalid array lower bound");
3289 if (TREE_OPERAND (t
, 3))
3290 CHECK_OP (3, "invalid array stride");
3292 else if (TREE_CODE (t
) == BIT_FIELD_REF
)
3294 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3295 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3298 t
= TREE_OPERAND (t
, 0);
3301 if (!CONSTANT_CLASS_P (t
) && !is_gimple_lvalue (t
))
3303 error ("invalid reference prefix");
3315 case UNORDERED_EXPR
:
3326 case TRUNC_DIV_EXPR
:
3328 case FLOOR_DIV_EXPR
:
3329 case ROUND_DIV_EXPR
:
3330 case TRUNC_MOD_EXPR
:
3332 case FLOOR_MOD_EXPR
:
3333 case ROUND_MOD_EXPR
:
3335 case EXACT_DIV_EXPR
:
3345 CHECK_OP (0, "invalid operand to binary operator");
3346 CHECK_OP (1, "invalid operand to binary operator");
3350 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
3363 /* Verify STMT, return true if STMT is not in GIMPLE form.
3364 TODO: Implement type checking. */
3367 verify_stmt (tree stmt
, bool last_in_block
)
3371 if (OMP_DIRECTIVE_P (stmt
))
3373 /* OpenMP directives are validated by the FE and never operated
3374 on by the optimizers. Furthermore, OMP_FOR may contain
3375 non-gimple expressions when the main index variable has had
3376 its address taken. This does not affect the loop itself
3377 because the header of an OMP_FOR is merely used to determine
3378 how to setup the parallel iteration. */
3382 if (!is_gimple_stmt (stmt
))
3384 error ("is not a valid GIMPLE statement");
3388 addr
= walk_tree (&stmt
, verify_expr
, NULL
, NULL
);
3391 debug_generic_stmt (addr
);
3395 /* If the statement is marked as part of an EH region, then it is
3396 expected that the statement could throw. Verify that when we
3397 have optimizations that simplify statements such that we prove
3398 that they cannot throw, that we update other data structures
3400 if (lookup_stmt_eh_region (stmt
) >= 0)
3402 if (!tree_could_throw_p (stmt
))
3404 error ("statement marked for throw, but doesn%'t");
3407 if (!last_in_block
&& tree_can_throw_internal (stmt
))
3409 error ("statement marked for throw in middle of block");
3417 debug_generic_stmt (stmt
);
3422 /* Return true when the T can be shared. */
3425 tree_node_can_be_shared (tree t
)
3427 if (IS_TYPE_OR_DECL_P (t
)
3428 || is_gimple_min_invariant (t
)
3429 || TREE_CODE (t
) == SSA_NAME
3430 || t
== error_mark_node
3431 || TREE_CODE (t
) == IDENTIFIER_NODE
)
3434 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
3437 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3438 && is_gimple_min_invariant (TREE_OPERAND (t
, 1)))
3439 || TREE_CODE (t
) == COMPONENT_REF
3440 || TREE_CODE (t
) == REALPART_EXPR
3441 || TREE_CODE (t
) == IMAGPART_EXPR
)
3442 t
= TREE_OPERAND (t
, 0);
3451 /* Called via walk_trees. Verify tree sharing. */
3454 verify_node_sharing (tree
* tp
, int *walk_subtrees
, void *data
)
3456 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
3458 if (tree_node_can_be_shared (*tp
))
3460 *walk_subtrees
= false;
3464 if (pointer_set_insert (visited
, *tp
))
3471 /* Helper function for verify_gimple_tuples. */
3474 verify_gimple_tuples_1 (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
3475 void *data ATTRIBUTE_UNUSED
)
3477 switch (TREE_CODE (*tp
))
3480 error ("unexpected non-tuple");
3490 /* Verify that there are no trees that should have been converted to
3491 gimple tuples. Return true if T contains a node that should have
3492 been converted to a gimple tuple, but hasn't. */
3495 verify_gimple_tuples (tree t
)
3497 return walk_tree (&t
, verify_gimple_tuples_1
, NULL
, NULL
) != NULL
;
3500 static bool eh_error_found
;
3502 verify_eh_throw_stmt_node (void **slot
, void *data
)
3504 struct throw_stmt_node
*node
= (struct throw_stmt_node
*)*slot
;
3505 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
3507 if (!pointer_set_contains (visited
, node
->stmt
))
3509 error ("Dead STMT in EH table");
3510 debug_generic_stmt (node
->stmt
);
3511 eh_error_found
= true;
3516 /* Verify the GIMPLE statement chain. */
3522 block_stmt_iterator bsi
;
3524 struct pointer_set_t
*visited
, *visited_stmts
;
3527 timevar_push (TV_TREE_STMT_VERIFY
);
3528 visited
= pointer_set_create ();
3529 visited_stmts
= pointer_set_create ();
3536 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
3538 int phi_num_args
= PHI_NUM_ARGS (phi
);
3540 pointer_set_insert (visited_stmts
, phi
);
3541 if (bb_for_stmt (phi
) != bb
)
3543 error ("bb_for_stmt (phi) is set to a wrong basic block");
3547 for (i
= 0; i
< phi_num_args
; i
++)
3549 tree t
= PHI_ARG_DEF (phi
, i
);
3552 /* Addressable variables do have SSA_NAMEs but they
3553 are not considered gimple values. */
3554 if (TREE_CODE (t
) != SSA_NAME
3555 && TREE_CODE (t
) != FUNCTION_DECL
3556 && !is_gimple_val (t
))
3558 error ("PHI def is not a GIMPLE value");
3559 debug_generic_stmt (phi
);
3560 debug_generic_stmt (t
);
3564 addr
= walk_tree (&t
, verify_expr
, (void *) 1, NULL
);
3567 debug_generic_stmt (addr
);
3571 addr
= walk_tree (&t
, verify_node_sharing
, visited
, NULL
);
3574 error ("incorrect sharing of tree nodes");
3575 debug_generic_stmt (phi
);
3576 debug_generic_stmt (addr
);
3582 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); )
3584 tree stmt
= bsi_stmt (bsi
);
3586 pointer_set_insert (visited_stmts
, stmt
);
3587 err
|= verify_gimple_tuples (stmt
);
3589 if (bb_for_stmt (stmt
) != bb
)
3591 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3596 err
|= verify_stmt (stmt
, bsi_end_p (bsi
));
3597 addr
= walk_tree (&stmt
, verify_node_sharing
, visited
, NULL
);
3600 error ("incorrect sharing of tree nodes");
3601 debug_generic_stmt (stmt
);
3602 debug_generic_stmt (addr
);
3607 eh_error_found
= false;
3608 if (get_eh_throw_stmt_table (cfun
))
3609 htab_traverse (get_eh_throw_stmt_table (cfun
),
3610 verify_eh_throw_stmt_node
,
3613 if (err
| eh_error_found
)
3614 internal_error ("verify_stmts failed");
3616 pointer_set_destroy (visited
);
3617 pointer_set_destroy (visited_stmts
);
3618 verify_histograms ();
3619 timevar_pop (TV_TREE_STMT_VERIFY
);
3623 /* Verifies that the flow information is OK. */
3626 tree_verify_flow_info (void)
3630 block_stmt_iterator bsi
;
3635 if (ENTRY_BLOCK_PTR
->stmt_list
)
3637 error ("ENTRY_BLOCK has a statement list associated with it");
3641 if (EXIT_BLOCK_PTR
->stmt_list
)
3643 error ("EXIT_BLOCK has a statement list associated with it");
3647 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
3648 if (e
->flags
& EDGE_FALLTHRU
)
3650 error ("fallthru to exit from bb %d", e
->src
->index
);
3656 bool found_ctrl_stmt
= false;
3660 /* Skip labels on the start of basic block. */
3661 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
3663 tree prev_stmt
= stmt
;
3665 stmt
= bsi_stmt (bsi
);
3667 if (TREE_CODE (stmt
) != LABEL_EXPR
)
3670 if (prev_stmt
&& DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
)))
3672 error ("nonlocal label ");
3673 print_generic_expr (stderr
, LABEL_EXPR_LABEL (stmt
), 0);
3674 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
3679 if (label_to_block (LABEL_EXPR_LABEL (stmt
)) != bb
)
3682 print_generic_expr (stderr
, LABEL_EXPR_LABEL (stmt
), 0);
3683 fprintf (stderr
, " to block does not match in bb %d",
3688 if (decl_function_context (LABEL_EXPR_LABEL (stmt
))
3689 != current_function_decl
)
3692 print_generic_expr (stderr
, LABEL_EXPR_LABEL (stmt
), 0);
3693 fprintf (stderr
, " has incorrect context in bb %d",
3699 /* Verify that body of basic block BB is free of control flow. */
3700 for (; !bsi_end_p (bsi
); bsi_next (&bsi
))
3702 tree stmt
= bsi_stmt (bsi
);
3704 if (found_ctrl_stmt
)
3706 error ("control flow in the middle of basic block %d",
3711 if (stmt_ends_bb_p (stmt
))
3712 found_ctrl_stmt
= true;
3714 if (TREE_CODE (stmt
) == LABEL_EXPR
)
3717 print_generic_expr (stderr
, LABEL_EXPR_LABEL (stmt
), 0);
3718 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
3723 bsi
= bsi_last (bb
);
3724 if (bsi_end_p (bsi
))
3727 stmt
= bsi_stmt (bsi
);
3729 err
|= verify_eh_edges (stmt
);
3731 if (is_ctrl_stmt (stmt
))
3733 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3734 if (e
->flags
& EDGE_FALLTHRU
)
3736 error ("fallthru edge after a control statement in bb %d",
3742 if (TREE_CODE (stmt
) != COND_EXPR
)
3744 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
3745 after anything else but if statement. */
3746 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3747 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
3749 error ("true/false edge after a non-COND_EXPR in bb %d",
3755 switch (TREE_CODE (stmt
))
3761 if (TREE_CODE (COND_EXPR_THEN (stmt
)) != GOTO_EXPR
3762 || TREE_CODE (COND_EXPR_ELSE (stmt
)) != GOTO_EXPR
)
3764 error ("structured COND_EXPR at the end of bb %d", bb
->index
);
3768 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
3770 if (!true_edge
|| !false_edge
3771 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
3772 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
3773 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
3774 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
3775 || EDGE_COUNT (bb
->succs
) >= 3)
3777 error ("wrong outgoing edge flags at end of bb %d",
3782 if (!has_label_p (true_edge
->dest
,
3783 GOTO_DESTINATION (COND_EXPR_THEN (stmt
))))
3785 error ("%<then%> label does not match edge at end of bb %d",
3790 if (!has_label_p (false_edge
->dest
,
3791 GOTO_DESTINATION (COND_EXPR_ELSE (stmt
))))
3793 error ("%<else%> label does not match edge at end of bb %d",
3801 if (simple_goto_p (stmt
))
3803 error ("explicit goto at end of bb %d", bb
->index
);
3808 /* FIXME. We should double check that the labels in the
3809 destination blocks have their address taken. */
3810 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3811 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
3812 | EDGE_FALSE_VALUE
))
3813 || !(e
->flags
& EDGE_ABNORMAL
))
3815 error ("wrong outgoing edge flags at end of bb %d",
3823 if (!single_succ_p (bb
)
3824 || (single_succ_edge (bb
)->flags
3825 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
3826 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
3828 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
3831 if (single_succ (bb
) != EXIT_BLOCK_PTR
)
3833 error ("return edge does not point to exit in bb %d",
3846 vec
= SWITCH_LABELS (stmt
);
3847 n
= TREE_VEC_LENGTH (vec
);
3849 /* Mark all the destination basic blocks. */
3850 for (i
= 0; i
< n
; ++i
)
3852 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
3853 basic_block label_bb
= label_to_block (lab
);
3855 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
3856 label_bb
->aux
= (void *)1;
3859 /* Verify that the case labels are sorted. */
3860 prev
= TREE_VEC_ELT (vec
, 0);
3861 for (i
= 1; i
< n
- 1; ++i
)
3863 tree c
= TREE_VEC_ELT (vec
, i
);
3866 error ("found default case not at end of case vector");
3870 if (! tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
3872 error ("case labels not sorted: ");
3873 print_generic_expr (stderr
, prev
, 0);
3874 fprintf (stderr
," is greater than ");
3875 print_generic_expr (stderr
, c
, 0);
3876 fprintf (stderr
," but comes before it.\n");
3881 if (CASE_LOW (TREE_VEC_ELT (vec
, n
- 1)))
3883 error ("no default case found at end of case vector");
3887 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3891 error ("extra outgoing edge %d->%d",
3892 bb
->index
, e
->dest
->index
);
3895 e
->dest
->aux
= (void *)2;
3896 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
3897 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
3899 error ("wrong outgoing edge flags at end of bb %d",
3905 /* Check that we have all of them. */
3906 for (i
= 0; i
< n
; ++i
)
3908 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
3909 basic_block label_bb
= label_to_block (lab
);
3911 if (label_bb
->aux
!= (void *)2)
3913 error ("missing edge %i->%i",
3914 bb
->index
, label_bb
->index
);
3919 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3920 e
->dest
->aux
= (void *)0;
3927 if (dom_computed
[CDI_DOMINATORS
] >= DOM_NO_FAST_QUERY
)
3928 verify_dominators (CDI_DOMINATORS
);
3934 /* Updates phi nodes after creating a forwarder block joined
3935 by edge FALLTHRU. */
3938 tree_make_forwarder_block (edge fallthru
)
3942 basic_block dummy
, bb
;
3943 tree phi
, new_phi
, var
;
3945 dummy
= fallthru
->src
;
3946 bb
= fallthru
->dest
;
3948 if (single_pred_p (bb
))
3951 /* If we redirected a branch we must create new PHI nodes at the
3953 for (phi
= phi_nodes (dummy
); phi
; phi
= PHI_CHAIN (phi
))
3955 var
= PHI_RESULT (phi
);
3956 new_phi
= create_phi_node (var
, bb
);
3957 SSA_NAME_DEF_STMT (var
) = new_phi
;
3958 SET_PHI_RESULT (phi
, make_ssa_name (SSA_NAME_VAR (var
), phi
));
3959 add_phi_arg (new_phi
, PHI_RESULT (phi
), fallthru
);
3962 /* Ensure that the PHI node chain is in the same order. */
3963 set_phi_nodes (bb
, phi_reverse (phi_nodes (bb
)));
3965 /* Add the arguments we have stored on edges. */
3966 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
3971 flush_pending_stmts (e
);
3976 /* Return a non-special label in the head of basic block BLOCK.
3977 Create one if it doesn't exist. */
3980 tree_block_label (basic_block bb
)
3982 block_stmt_iterator i
, s
= bsi_start (bb
);
3986 for (i
= s
; !bsi_end_p (i
); first
= false, bsi_next (&i
))
3988 stmt
= bsi_stmt (i
);
3989 if (TREE_CODE (stmt
) != LABEL_EXPR
)
3991 label
= LABEL_EXPR_LABEL (stmt
);
3992 if (!DECL_NONLOCAL (label
))
3995 bsi_move_before (&i
, &s
);
4000 label
= create_artificial_label ();
4001 stmt
= build1 (LABEL_EXPR
, void_type_node
, label
);
4002 bsi_insert_before (&s
, stmt
, BSI_NEW_STMT
);
4007 /* Attempt to perform edge redirection by replacing a possibly complex
4008 jump instruction by a goto or by removing the jump completely.
4009 This can apply only if all edges now point to the same block. The
4010 parameters and return values are equivalent to
4011 redirect_edge_and_branch. */
4014 tree_try_redirect_by_replacing_jump (edge e
, basic_block target
)
4016 basic_block src
= e
->src
;
4017 block_stmt_iterator b
;
4020 /* We can replace or remove a complex jump only when we have exactly
4022 if (EDGE_COUNT (src
->succs
) != 2
4023 /* Verify that all targets will be TARGET. Specifically, the
4024 edge that is not E must also go to TARGET. */
4025 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
4031 stmt
= bsi_stmt (b
);
4033 if (TREE_CODE (stmt
) == COND_EXPR
4034 || TREE_CODE (stmt
) == SWITCH_EXPR
)
4036 bsi_remove (&b
, true);
4037 e
= ssa_redirect_edge (e
, target
);
4038 e
->flags
= EDGE_FALLTHRU
;
4046 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4047 edge representing the redirected branch. */
4050 tree_redirect_edge_and_branch (edge e
, basic_block dest
)
4052 basic_block bb
= e
->src
;
4053 block_stmt_iterator bsi
;
4057 if (e
->flags
& EDGE_ABNORMAL
)
4060 if (e
->src
!= ENTRY_BLOCK_PTR
4061 && (ret
= tree_try_redirect_by_replacing_jump (e
, dest
)))
4064 if (e
->dest
== dest
)
4067 label
= tree_block_label (dest
);
4069 bsi
= bsi_last (bb
);
4070 stmt
= bsi_end_p (bsi
) ? NULL
: bsi_stmt (bsi
);
4072 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
4075 stmt
= (e
->flags
& EDGE_TRUE_VALUE
4076 ? COND_EXPR_THEN (stmt
)
4077 : COND_EXPR_ELSE (stmt
));
4078 GOTO_DESTINATION (stmt
) = label
;
4082 /* No non-abnormal edges should lead from a non-simple goto, and
4083 simple ones should be represented implicitly. */
4088 tree cases
= get_cases_for_edge (e
, stmt
);
4090 /* If we have a list of cases associated with E, then use it
4091 as it's a lot faster than walking the entire case vector. */
4094 edge e2
= find_edge (e
->src
, dest
);
4101 CASE_LABEL (cases
) = label
;
4102 cases
= TREE_CHAIN (cases
);
4105 /* If there was already an edge in the CFG, then we need
4106 to move all the cases associated with E to E2. */
4109 tree cases2
= get_cases_for_edge (e2
, stmt
);
4111 TREE_CHAIN (last
) = TREE_CHAIN (cases2
);
4112 TREE_CHAIN (cases2
) = first
;
4117 tree vec
= SWITCH_LABELS (stmt
);
4118 size_t i
, n
= TREE_VEC_LENGTH (vec
);
4120 for (i
= 0; i
< n
; i
++)
4122 tree elt
= TREE_VEC_ELT (vec
, i
);
4124 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
4125 CASE_LABEL (elt
) = label
;
4133 bsi_remove (&bsi
, true);
4134 e
->flags
|= EDGE_FALLTHRU
;
4138 /* Otherwise it must be a fallthru edge, and we don't need to
4139 do anything besides redirecting it. */
4140 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
4144 /* Update/insert PHI nodes as necessary. */
4146 /* Now update the edges in the CFG. */
4147 e
= ssa_redirect_edge (e
, dest
);
4152 /* Returns true if it is possible to remove edge E by redirecting
4153 it to the destination of the other edge from E->src. */
4156 tree_can_remove_branch_p (edge e
)
4158 if (e
->flags
& EDGE_ABNORMAL
)
4164 /* Simple wrapper, as we can always redirect fallthru edges. */
4167 tree_redirect_edge_and_branch_force (edge e
, basic_block dest
)
4169 e
= tree_redirect_edge_and_branch (e
, dest
);
4176 /* Splits basic block BB after statement STMT (but at least after the
4177 labels). If STMT is NULL, BB is split just after the labels. */
4180 tree_split_block (basic_block bb
, void *stmt
)
4182 block_stmt_iterator bsi
;
4183 tree_stmt_iterator tsi_tgt
;
4189 new_bb
= create_empty_bb (bb
);
4191 /* Redirect the outgoing edges. */
4192 new_bb
->succs
= bb
->succs
;
4194 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
4197 if (stmt
&& TREE_CODE ((tree
) stmt
) == LABEL_EXPR
)
4200 /* Move everything from BSI to the new basic block. */
4201 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
4203 act
= bsi_stmt (bsi
);
4204 if (TREE_CODE (act
) == LABEL_EXPR
)
4217 if (bsi_end_p (bsi
))
4220 /* Split the statement list - avoid re-creating new containers as this
4221 brings ugly quadratic memory consumption in the inliner.
4222 (We are still quadratic since we need to update stmt BB pointers,
4224 new_bb
->stmt_list
= tsi_split_statement_list_before (&bsi
.tsi
);
4225 for (tsi_tgt
= tsi_start (new_bb
->stmt_list
);
4226 !tsi_end_p (tsi_tgt
); tsi_next (&tsi_tgt
))
4227 change_bb_for_stmt (tsi_stmt (tsi_tgt
), new_bb
);
4233 /* Moves basic block BB after block AFTER. */
4236 tree_move_block_after (basic_block bb
, basic_block after
)
4238 if (bb
->prev_bb
== after
)
4242 link_block (bb
, after
);
4248 /* Return true if basic_block can be duplicated. */
4251 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED
)
4257 /* Create a duplicate of the basic block BB. NOTE: This does not
4258 preserve SSA form. */
4261 tree_duplicate_bb (basic_block bb
)
4264 block_stmt_iterator bsi
, bsi_tgt
;
4267 new_bb
= create_empty_bb (EXIT_BLOCK_PTR
->prev_bb
);
4269 /* Copy the PHI nodes. We ignore PHI node arguments here because
4270 the incoming edges have not been setup yet. */
4271 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
4273 tree copy
= create_phi_node (PHI_RESULT (phi
), new_bb
);
4274 create_new_def_for (PHI_RESULT (copy
), copy
, PHI_RESULT_PTR (copy
));
4277 /* Keep the chain of PHI nodes in the same order so that they can be
4278 updated by ssa_redirect_edge. */
4279 set_phi_nodes (new_bb
, phi_reverse (phi_nodes (new_bb
)));
4281 bsi_tgt
= bsi_start (new_bb
);
4282 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
4284 def_operand_p def_p
;
4285 ssa_op_iter op_iter
;
4289 stmt
= bsi_stmt (bsi
);
4290 if (TREE_CODE (stmt
) == LABEL_EXPR
)
4293 /* Create a new copy of STMT and duplicate STMT's virtual
4295 copy
= unshare_expr (stmt
);
4296 bsi_insert_after (&bsi_tgt
, copy
, BSI_NEW_STMT
);
4297 copy_virtual_operands (copy
, stmt
);
4298 region
= lookup_stmt_eh_region (stmt
);
4300 add_stmt_to_eh_region (copy
, region
);
4301 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
4303 /* Create new names for all the definitions created by COPY and
4304 add replacement mappings for each new name. */
4305 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
4306 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
4313 /* Basic block BB_COPY was created by code duplication. Add phi node
4314 arguments for edges going out of BB_COPY. The blocks that were
4315 duplicated have BB_DUPLICATED set. */
4318 add_phi_args_after_copy_bb (basic_block bb_copy
)
4320 basic_block bb
, dest
;
4323 tree phi
, phi_copy
, phi_next
, def
;
4325 bb
= get_bb_original (bb_copy
);
4327 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
4329 if (!phi_nodes (e_copy
->dest
))
4332 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
4333 dest
= get_bb_original (e_copy
->dest
);
4335 dest
= e_copy
->dest
;
4337 e
= find_edge (bb
, dest
);
4340 /* During loop unrolling the target of the latch edge is copied.
4341 In this case we are not looking for edge to dest, but to
4342 duplicated block whose original was dest. */
4343 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4344 if ((e
->dest
->flags
& BB_DUPLICATED
)
4345 && get_bb_original (e
->dest
) == dest
)
4348 gcc_assert (e
!= NULL
);
4351 for (phi
= phi_nodes (e
->dest
), phi_copy
= phi_nodes (e_copy
->dest
);
4353 phi
= phi_next
, phi_copy
= PHI_CHAIN (phi_copy
))
4355 phi_next
= PHI_CHAIN (phi
);
4356 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
4357 add_phi_arg (phi_copy
, def
, e_copy
);
4362 /* Blocks in REGION_COPY array of length N_REGION were created by
4363 duplication of basic blocks. Add phi node arguments for edges
4364 going from these blocks. */
4367 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
)
4371 for (i
= 0; i
< n_region
; i
++)
4372 region_copy
[i
]->flags
|= BB_DUPLICATED
;
4374 for (i
= 0; i
< n_region
; i
++)
4375 add_phi_args_after_copy_bb (region_copy
[i
]);
4377 for (i
= 0; i
< n_region
; i
++)
4378 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
4381 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4382 important exit edge EXIT. By important we mean that no SSA name defined
4383 inside region is live over the other exit edges of the region. All entry
4384 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4385 to the duplicate of the region. SSA form, dominance and loop information
4386 is updated. The new basic blocks are stored to REGION_COPY in the same
4387 order as they had in REGION, provided that REGION_COPY is not NULL.
4388 The function returns false if it is unable to copy the region,
4392 tree_duplicate_sese_region (edge entry
, edge exit
,
4393 basic_block
*region
, unsigned n_region
,
4394 basic_block
*region_copy
)
4397 bool free_region_copy
= false, copying_header
= false;
4398 struct loop
*loop
= entry
->dest
->loop_father
;
4402 int total_freq
= 0, entry_freq
= 0;
4403 gcov_type total_count
= 0, entry_count
= 0;
4405 if (!can_copy_bbs_p (region
, n_region
))
4408 /* Some sanity checking. Note that we do not check for all possible
4409 missuses of the functions. I.e. if you ask to copy something weird,
4410 it will work, but the state of structures probably will not be
4412 for (i
= 0; i
< n_region
; i
++)
4414 /* We do not handle subloops, i.e. all the blocks must belong to the
4416 if (region
[i
]->loop_father
!= loop
)
4419 if (region
[i
] != entry
->dest
4420 && region
[i
] == loop
->header
)
4426 /* In case the function is used for loop header copying (which is the primary
4427 use), ensure that EXIT and its copy will be new latch and entry edges. */
4428 if (loop
->header
== entry
->dest
)
4430 copying_header
= true;
4431 loop
->copy
= loop
->outer
;
4433 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
4436 for (i
= 0; i
< n_region
; i
++)
4437 if (region
[i
] != exit
->src
4438 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
4444 region_copy
= XNEWVEC (basic_block
, n_region
);
4445 free_region_copy
= true;
4448 gcc_assert (!need_ssa_update_p ());
4450 /* Record blocks outside the region that are dominated by something
4452 doms
= XNEWVEC (basic_block
, n_basic_blocks
);
4453 initialize_original_copy_tables ();
4455 n_doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
, doms
);
4457 if (entry
->dest
->count
)
4459 total_count
= entry
->dest
->count
;
4460 entry_count
= entry
->count
;
4461 /* Fix up corner cases, to avoid division by zero or creation of negative
4463 if (entry_count
> total_count
)
4464 entry_count
= total_count
;
4468 total_freq
= entry
->dest
->frequency
;
4469 entry_freq
= EDGE_FREQUENCY (entry
);
4470 /* Fix up corner cases, to avoid division by zero or creation of negative
4472 if (total_freq
== 0)
4474 else if (entry_freq
> total_freq
)
4475 entry_freq
= total_freq
;
4478 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
4479 split_edge_bb_loc (entry
));
4482 scale_bbs_frequencies_gcov_type (region
, n_region
,
4483 total_count
- entry_count
,
4485 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
4490 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
4492 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
4497 loop
->header
= exit
->dest
;
4498 loop
->latch
= exit
->src
;
4501 /* Redirect the entry and add the phi node arguments. */
4502 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
4503 gcc_assert (redirected
!= NULL
);
4504 flush_pending_stmts (entry
);
4506 /* Concerning updating of dominators: We must recount dominators
4507 for entry block and its copy. Anything that is outside of the
4508 region, but was dominated by something inside needs recounting as
4510 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
4511 doms
[n_doms
++] = get_bb_original (entry
->dest
);
4512 iterate_fix_dominators (CDI_DOMINATORS
, doms
, n_doms
);
4515 /* Add the other PHI node arguments. */
4516 add_phi_args_after_copy (region_copy
, n_region
);
4518 /* Update the SSA web. */
4519 update_ssa (TODO_update_ssa
);
4521 if (free_region_copy
)
4524 free_original_copy_tables ();
4529 DEF_VEC_P(basic_block);
4530 DEF_VEC_ALLOC_P(basic_block,heap);
4533 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4534 adding blocks when the dominator traversal reaches EXIT. This
4535 function silently assumes that ENTRY strictly dominates EXIT. */
4538 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
4539 VEC(basic_block
,heap
) **bbs_p
)
4543 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
4545 son
= next_dom_son (CDI_DOMINATORS
, son
))
4547 VEC_safe_push (basic_block
, heap
, *bbs_p
, son
);
4549 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
4559 bitmap vars_to_remove
;
4560 htab_t new_label_map
;
4564 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
4565 contained in *TP and change the DECL_CONTEXT of every local
4566 variable referenced in *TP. */
4569 move_stmt_r (tree
*tp
, int *walk_subtrees
, void *data
)
4571 struct move_stmt_d
*p
= (struct move_stmt_d
*) data
;
4575 && (EXPR_P (t
) || GIMPLE_STMT_P (t
)))
4576 TREE_BLOCK (t
) = p
->block
;
4578 if (OMP_DIRECTIVE_P (t
)
4579 && TREE_CODE (t
) != OMP_RETURN
4580 && TREE_CODE (t
) != OMP_CONTINUE
)
4582 /* Do not remap variables inside OMP directives. Variables
4583 referenced in clauses and directive header belong to the
4584 parent function and should not be moved into the child
4586 bool save_remap_decls_p
= p
->remap_decls_p
;
4587 p
->remap_decls_p
= false;
4590 walk_tree (&OMP_BODY (t
), move_stmt_r
, p
, NULL
);
4592 p
->remap_decls_p
= save_remap_decls_p
;
4594 else if (DECL_P (t
) && DECL_CONTEXT (t
) == p
->from_context
)
4596 if (TREE_CODE (t
) == LABEL_DECL
)
4598 if (p
->new_label_map
)
4600 struct tree_map in
, *out
;
4602 out
= htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
4607 DECL_CONTEXT (t
) = p
->to_context
;
4609 else if (p
->remap_decls_p
)
4611 DECL_CONTEXT (t
) = p
->to_context
;
4613 if (TREE_CODE (t
) == VAR_DECL
)
4615 struct function
*f
= DECL_STRUCT_FUNCTION (p
->to_context
);
4616 f
->unexpanded_var_list
4617 = tree_cons (0, t
, f
->unexpanded_var_list
);
4619 /* Mark T to be removed from the original function,
4620 otherwise it will be given a DECL_RTL when the
4621 original function is expanded. */
4622 bitmap_set_bit (p
->vars_to_remove
, DECL_UID (t
));
4626 else if (TYPE_P (t
))
4633 /* Move basic block BB from function CFUN to function DEST_FN. The
4634 block is moved out of the original linked list and placed after
4635 block AFTER in the new list. Also, the block is removed from the
4636 original array of blocks and placed in DEST_FN's array of blocks.
4637 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
4638 updated to reflect the moved edges.
4640 On exit, local variables that need to be removed from
4641 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
4644 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
4645 basic_block after
, bool update_edge_count_p
,
4646 bitmap vars_to_remove
, htab_t new_label_map
, int eh_offset
)
4648 struct control_flow_graph
*cfg
;
4651 block_stmt_iterator si
;
4652 struct move_stmt_d d
;
4653 unsigned old_len
, new_len
;
4655 /* Link BB to the new linked list. */
4656 move_block_after (bb
, after
);
4658 /* Update the edge count in the corresponding flowgraphs. */
4659 if (update_edge_count_p
)
4660 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4662 cfun
->cfg
->x_n_edges
--;
4663 dest_cfun
->cfg
->x_n_edges
++;
4666 /* Remove BB from the original basic block array. */
4667 VEC_replace (basic_block
, cfun
->cfg
->x_basic_block_info
, bb
->index
, NULL
);
4668 cfun
->cfg
->x_n_basic_blocks
--;
4670 /* Grow DEST_CFUN's basic block array if needed. */
4671 cfg
= dest_cfun
->cfg
;
4672 cfg
->x_n_basic_blocks
++;
4673 if (bb
->index
> cfg
->x_last_basic_block
)
4674 cfg
->x_last_basic_block
= bb
->index
;
4676 old_len
= VEC_length (basic_block
, cfg
->x_basic_block_info
);
4677 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
4679 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
4680 VEC_safe_grow_cleared (basic_block
, gc
, cfg
->x_basic_block_info
,
4684 VEC_replace (basic_block
, cfg
->x_basic_block_info
,
4685 cfg
->x_last_basic_block
, bb
);
4687 /* The statements in BB need to be associated with a new TREE_BLOCK.
4688 Labels need to be associated with a new label-to-block map. */
4689 memset (&d
, 0, sizeof (d
));
4690 d
.vars_to_remove
= vars_to_remove
;
4692 for (si
= bsi_start (bb
); !bsi_end_p (si
); bsi_next (&si
))
4694 tree stmt
= bsi_stmt (si
);
4697 d
.from_context
= cfun
->decl
;
4698 d
.to_context
= dest_cfun
->decl
;
4699 d
.remap_decls_p
= true;
4700 d
.new_label_map
= new_label_map
;
4701 if (TREE_BLOCK (stmt
))
4702 d
.block
= DECL_INITIAL (dest_cfun
->decl
);
4704 walk_tree (&stmt
, move_stmt_r
, &d
, NULL
);
4706 if (TREE_CODE (stmt
) == LABEL_EXPR
)
4708 tree label
= LABEL_EXPR_LABEL (stmt
);
4709 int uid
= LABEL_DECL_UID (label
);
4711 gcc_assert (uid
> -1);
4713 old_len
= VEC_length (basic_block
, cfg
->x_label_to_block_map
);
4714 if (old_len
<= (unsigned) uid
)
4716 new_len
= 3 * uid
/ 2;
4717 VEC_safe_grow_cleared (basic_block
, gc
,
4718 cfg
->x_label_to_block_map
, new_len
);
4721 VEC_replace (basic_block
, cfg
->x_label_to_block_map
, uid
, bb
);
4722 VEC_replace (basic_block
, cfun
->cfg
->x_label_to_block_map
, uid
, NULL
);
4724 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
4726 if (uid
>= dest_cfun
->last_label_uid
)
4727 dest_cfun
->last_label_uid
= uid
+ 1;
4729 else if (TREE_CODE (stmt
) == RESX_EXPR
&& eh_offset
!= 0)
4730 TREE_OPERAND (stmt
, 0) =
4731 build_int_cst (NULL_TREE
,
4732 TREE_INT_CST_LOW (TREE_OPERAND (stmt
, 0))
4735 region
= lookup_stmt_eh_region (stmt
);
4738 add_stmt_to_eh_region_fn (dest_cfun
, stmt
, region
+ eh_offset
);
4739 remove_stmt_from_eh_region (stmt
);
4740 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
4741 gimple_remove_stmt_histograms (cfun
, stmt
);
4746 /* Examine the statements in BB (which is in SRC_CFUN); find and return
4747 the outermost EH region. Use REGION as the incoming base EH region. */
4750 find_outermost_region_in_block (struct function
*src_cfun
,
4751 basic_block bb
, int region
)
4753 block_stmt_iterator si
;
4755 for (si
= bsi_start (bb
); !bsi_end_p (si
); bsi_next (&si
))
4757 tree stmt
= bsi_stmt (si
);
4760 if (TREE_CODE (stmt
) == RESX_EXPR
)
4761 stmt_region
= TREE_INT_CST_LOW (TREE_OPERAND (stmt
, 0));
4763 stmt_region
= lookup_stmt_eh_region_fn (src_cfun
, stmt
);
4764 if (stmt_region
> 0)
4767 region
= stmt_region
;
4768 else if (stmt_region
!= region
)
4770 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
4771 gcc_assert (region
!= -1);
4780 new_label_mapper (tree decl
, void *data
)
4782 htab_t hash
= (htab_t
) data
;
4786 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
4788 m
= xmalloc (sizeof (struct tree_map
));
4789 m
->hash
= DECL_UID (decl
);
4791 m
->to
= create_artificial_label ();
4792 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
4794 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
4795 gcc_assert (*slot
== NULL
);
4802 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
4803 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
4804 single basic block in the original CFG and the new basic block is
4805 returned. DEST_CFUN must not have a CFG yet.
4807 Note that the region need not be a pure SESE region. Blocks inside
4808 the region may contain calls to abort/exit. The only restriction
4809 is that ENTRY_BB should be the only entry point and it must
4812 All local variables referenced in the region are assumed to be in
4813 the corresponding BLOCK_VARS and unexpanded variable lists
4814 associated with DEST_CFUN. */
4817 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
4818 basic_block exit_bb
)
4820 VEC(basic_block
,heap
) *bbs
;
4821 basic_block after
, bb
, *entry_pred
, *exit_succ
;
4822 struct function
*saved_cfun
;
4823 int *entry_flag
, *exit_flag
, eh_offset
;
4824 unsigned i
, num_entry_edges
, num_exit_edges
;
4827 bitmap vars_to_remove
;
4828 htab_t new_label_map
;
4832 /* Collect all the blocks in the region. Manually add ENTRY_BB
4833 because it won't be added by dfs_enumerate_from. */
4834 calculate_dominance_info (CDI_DOMINATORS
);
4836 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4838 gcc_assert (entry_bb
!= exit_bb
4840 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
4843 VEC_safe_push (basic_block
, heap
, bbs
, entry_bb
);
4844 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
4846 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
4847 the predecessor edges to ENTRY_BB and the successor edges to
4848 EXIT_BB so that we can re-attach them to the new basic block that
4849 will replace the region. */
4850 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
4851 entry_pred
= (basic_block
*) xcalloc (num_entry_edges
, sizeof (basic_block
));
4852 entry_flag
= (int *) xcalloc (num_entry_edges
, sizeof (int));
4854 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
4856 entry_flag
[i
] = e
->flags
;
4857 entry_pred
[i
++] = e
->src
;
4863 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
4864 exit_succ
= (basic_block
*) xcalloc (num_exit_edges
,
4865 sizeof (basic_block
));
4866 exit_flag
= (int *) xcalloc (num_exit_edges
, sizeof (int));
4868 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
4870 exit_flag
[i
] = e
->flags
;
4871 exit_succ
[i
++] = e
->dest
;
4882 /* Switch context to the child function to initialize DEST_FN's CFG. */
4883 gcc_assert (dest_cfun
->cfg
== NULL
);
4886 init_empty_tree_cfg ();
4888 /* Initialize EH information for the new function. */
4890 new_label_map
= NULL
;
4895 for (i
= 0; VEC_iterate (basic_block
, bbs
, i
, bb
); i
++)
4896 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
4898 init_eh_for_function ();
4901 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
4902 eh_offset
= duplicate_eh_regions (saved_cfun
, new_label_mapper
,
4903 new_label_map
, region
, 0);
4909 /* Move blocks from BBS into DEST_CFUN. */
4910 gcc_assert (VEC_length (basic_block
, bbs
) >= 2);
4911 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
4912 vars_to_remove
= BITMAP_ALLOC (NULL
);
4913 for (i
= 0; VEC_iterate (basic_block
, bbs
, i
, bb
); i
++)
4915 /* No need to update edge counts on the last block. It has
4916 already been updated earlier when we detached the region from
4917 the original CFG. */
4918 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, vars_to_remove
,
4919 new_label_map
, eh_offset
);
4924 htab_delete (new_label_map
);
4926 /* Remove the variables marked in VARS_TO_REMOVE from
4927 CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
4928 DECL_RTL in the context of CFUN. */
4929 if (!bitmap_empty_p (vars_to_remove
))
4933 for (p
= &cfun
->unexpanded_var_list
; *p
; )
4935 tree var
= TREE_VALUE (*p
);
4936 if (bitmap_bit_p (vars_to_remove
, DECL_UID (var
)))
4938 *p
= TREE_CHAIN (*p
);
4942 p
= &TREE_CHAIN (*p
);
4946 BITMAP_FREE (vars_to_remove
);
4948 /* Rewire the entry and exit blocks. The successor to the entry
4949 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
4950 the child function. Similarly, the predecessor of DEST_FN's
4951 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
4952 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
4953 various CFG manipulation function get to the right CFG.
4955 FIXME, this is silly. The CFG ought to become a parameter to
4958 make_edge (ENTRY_BLOCK_PTR
, entry_bb
, EDGE_FALLTHRU
);
4960 make_edge (exit_bb
, EXIT_BLOCK_PTR
, 0);
4963 /* Back in the original function, the SESE region has disappeared,
4964 create a new basic block in its place. */
4965 bb
= create_empty_bb (entry_pred
[0]);
4966 for (i
= 0; i
< num_entry_edges
; i
++)
4967 make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
4969 for (i
= 0; i
< num_exit_edges
; i
++)
4970 make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
4979 free_dominance_info (CDI_DOMINATORS
);
4980 free_dominance_info (CDI_POST_DOMINATORS
);
4981 VEC_free (basic_block
, heap
, bbs
);
4987 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
4990 dump_function_to_file (tree fn
, FILE *file
, int flags
)
4992 tree arg
, vars
, var
;
4993 bool ignore_topmost_bind
= false, any_var
= false;
4996 struct function
*saved_cfun
;
4998 fprintf (file
, "%s (", lang_hooks
.decl_printable_name (fn
, 2));
5000 arg
= DECL_ARGUMENTS (fn
);
5003 print_generic_expr (file
, arg
, dump_flags
);
5004 if (TREE_CHAIN (arg
))
5005 fprintf (file
, ", ");
5006 arg
= TREE_CHAIN (arg
);
5008 fprintf (file
, ")\n");
5010 if (flags
& TDF_DETAILS
)
5011 dump_eh_tree (file
, DECL_STRUCT_FUNCTION (fn
));
5012 if (flags
& TDF_RAW
)
5014 dump_node (fn
, TDF_SLIM
| flags
, file
);
5018 /* Switch CFUN to point to FN. */
5020 cfun
= DECL_STRUCT_FUNCTION (fn
);
5022 /* When GIMPLE is lowered, the variables are no longer available in
5023 BIND_EXPRs, so display them separately. */
5024 if (cfun
&& cfun
->decl
== fn
&& cfun
->unexpanded_var_list
)
5026 ignore_topmost_bind
= true;
5028 fprintf (file
, "{\n");
5029 for (vars
= cfun
->unexpanded_var_list
; vars
; vars
= TREE_CHAIN (vars
))
5031 var
= TREE_VALUE (vars
);
5033 print_generic_decl (file
, var
, flags
);
5034 fprintf (file
, "\n");
5040 if (cfun
&& cfun
->decl
== fn
&& cfun
->cfg
&& basic_block_info
)
5042 /* Make a CFG based dump. */
5043 check_bb_profile (ENTRY_BLOCK_PTR
, file
);
5044 if (!ignore_topmost_bind
)
5045 fprintf (file
, "{\n");
5047 if (any_var
&& n_basic_blocks
)
5048 fprintf (file
, "\n");
5051 dump_generic_bb (file
, bb
, 2, flags
);
5053 fprintf (file
, "}\n");
5054 check_bb_profile (EXIT_BLOCK_PTR
, file
);
5060 /* Make a tree based dump. */
5061 chain
= DECL_SAVED_TREE (fn
);
5063 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
5065 if (ignore_topmost_bind
)
5067 chain
= BIND_EXPR_BODY (chain
);
5075 if (!ignore_topmost_bind
)
5076 fprintf (file
, "{\n");
5081 fprintf (file
, "\n");
5083 print_generic_stmt_indented (file
, chain
, flags
, indent
);
5084 if (ignore_topmost_bind
)
5085 fprintf (file
, "}\n");
5088 fprintf (file
, "\n\n");
5095 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
5098 debug_function (tree fn
, int flags
)
5100 dump_function_to_file (fn
, stderr
, flags
);
5104 /* Pretty print of the loops intermediate representation. */
5105 static void print_loop (FILE *, struct loop
*, int);
5106 static void print_pred_bbs (FILE *, basic_block bb
);
5107 static void print_succ_bbs (FILE *, basic_block bb
);
5110 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5113 print_pred_bbs (FILE *file
, basic_block bb
)
5118 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5119 fprintf (file
, "bb_%d ", e
->src
->index
);
5123 /* Print on FILE the indexes for the successors of basic_block BB. */
5126 print_succ_bbs (FILE *file
, basic_block bb
)
5131 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5132 fprintf (file
, "bb_%d ", e
->dest
->index
);
5136 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5139 print_loop (FILE *file
, struct loop
*loop
, int indent
)
5147 s_indent
= (char *) alloca ((size_t) indent
+ 1);
5148 memset ((void *) s_indent
, ' ', (size_t) indent
);
5149 s_indent
[indent
] = '\0';
5151 /* Print the loop's header. */
5152 fprintf (file
, "%sloop_%d\n", s_indent
, loop
->num
);
5154 /* Print the loop's body. */
5155 fprintf (file
, "%s{\n", s_indent
);
5157 if (bb
->loop_father
== loop
)
5159 /* Print the basic_block's header. */
5160 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
5161 print_pred_bbs (file
, bb
);
5162 fprintf (file
, "}, succs = {");
5163 print_succ_bbs (file
, bb
);
5164 fprintf (file
, "})\n");
5166 /* Print the basic_block's body. */
5167 fprintf (file
, "%s {\n", s_indent
);
5168 tree_dump_bb (bb
, file
, indent
+ 4);
5169 fprintf (file
, "%s }\n", s_indent
);
5172 print_loop (file
, loop
->inner
, indent
+ 2);
5173 fprintf (file
, "%s}\n", s_indent
);
5174 print_loop (file
, loop
->next
, indent
);
5178 /* Follow a CFG edge from the entry point of the program, and on entry
5179 of a loop, pretty print the loop structure on FILE. */
5182 print_loop_ir (FILE *file
)
5186 bb
= BASIC_BLOCK (NUM_FIXED_BLOCKS
);
5187 if (bb
&& bb
->loop_father
)
5188 print_loop (file
, bb
->loop_father
, 0);
5192 /* Debugging loops structure at tree level. */
5195 debug_loop_ir (void)
5197 print_loop_ir (stderr
);
5201 /* Return true if BB ends with a call, possibly followed by some
5202 instructions that must stay with the call. Return false,
5206 tree_block_ends_with_call_p (basic_block bb
)
5208 block_stmt_iterator bsi
= bsi_last (bb
);
5209 return get_call_expr_in (bsi_stmt (bsi
)) != NULL
;
5213 /* Return true if BB ends with a conditional branch. Return false,
5217 tree_block_ends_with_condjump_p (basic_block bb
)
5219 tree stmt
= last_stmt (bb
);
5220 return (stmt
&& TREE_CODE (stmt
) == COND_EXPR
);
5224 /* Return true if we need to add fake edge to exit at statement T.
5225 Helper function for tree_flow_call_edges_add. */
5228 need_fake_edge_p (tree t
)
5232 /* NORETURN and LONGJMP calls already have an edge to exit.
5233 CONST and PURE calls do not need one.
5234 We don't currently check for CONST and PURE here, although
5235 it would be a good idea, because those attributes are
5236 figured out from the RTL in mark_constant_function, and
5237 the counter incrementation code from -fprofile-arcs
5238 leads to different results from -fbranch-probabilities. */
5239 call
= get_call_expr_in (t
);
5241 && !(call_expr_flags (call
) & ECF_NORETURN
))
5244 if (TREE_CODE (t
) == ASM_EXPR
5245 && (ASM_VOLATILE_P (t
) || ASM_INPUT_P (t
)))
5252 /* Add fake edges to the function exit for any non constant and non
5253 noreturn calls, volatile inline assembly in the bitmap of blocks
5254 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
5255 the number of blocks that were split.
5257 The goal is to expose cases in which entering a basic block does
5258 not imply that all subsequent instructions must be executed. */
5261 tree_flow_call_edges_add (sbitmap blocks
)
5264 int blocks_split
= 0;
5265 int last_bb
= last_basic_block
;
5266 bool check_last_block
= false;
5268 if (n_basic_blocks
== NUM_FIXED_BLOCKS
)
5272 check_last_block
= true;
5274 check_last_block
= TEST_BIT (blocks
, EXIT_BLOCK_PTR
->prev_bb
->index
);
5276 /* In the last basic block, before epilogue generation, there will be
5277 a fallthru edge to EXIT. Special care is required if the last insn
5278 of the last basic block is a call because make_edge folds duplicate
5279 edges, which would result in the fallthru edge also being marked
5280 fake, which would result in the fallthru edge being removed by
5281 remove_fake_edges, which would result in an invalid CFG.
5283 Moreover, we can't elide the outgoing fake edge, since the block
5284 profiler needs to take this into account in order to solve the minimal
5285 spanning tree in the case that the call doesn't return.
5287 Handle this by adding a dummy instruction in a new last basic block. */
5288 if (check_last_block
)
5290 basic_block bb
= EXIT_BLOCK_PTR
->prev_bb
;
5291 block_stmt_iterator bsi
= bsi_last (bb
);
5293 if (!bsi_end_p (bsi
))
5296 if (t
&& need_fake_edge_p (t
))
5300 e
= find_edge (bb
, EXIT_BLOCK_PTR
);
5303 bsi_insert_on_edge (e
, build_empty_stmt ());
5304 bsi_commit_edge_inserts ();
5309 /* Now add fake edges to the function exit for any non constant
5310 calls since there is no way that we can determine if they will
5312 for (i
= 0; i
< last_bb
; i
++)
5314 basic_block bb
= BASIC_BLOCK (i
);
5315 block_stmt_iterator bsi
;
5316 tree stmt
, last_stmt
;
5321 if (blocks
&& !TEST_BIT (blocks
, i
))
5324 bsi
= bsi_last (bb
);
5325 if (!bsi_end_p (bsi
))
5327 last_stmt
= bsi_stmt (bsi
);
5330 stmt
= bsi_stmt (bsi
);
5331 if (need_fake_edge_p (stmt
))
5334 /* The handling above of the final block before the
5335 epilogue should be enough to verify that there is
5336 no edge to the exit block in CFG already.
5337 Calling make_edge in such case would cause us to
5338 mark that edge as fake and remove it later. */
5339 #ifdef ENABLE_CHECKING
5340 if (stmt
== last_stmt
)
5342 e
= find_edge (bb
, EXIT_BLOCK_PTR
);
5343 gcc_assert (e
== NULL
);
5347 /* Note that the following may create a new basic block
5348 and renumber the existing basic blocks. */
5349 if (stmt
!= last_stmt
)
5351 e
= split_block (bb
, stmt
);
5355 make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_FAKE
);
5359 while (!bsi_end_p (bsi
));
5364 verify_flow_info ();
5366 return blocks_split
;
5369 /* Purge dead abnormal call edges from basic block BB. */
5372 tree_purge_dead_abnormal_call_edges (basic_block bb
)
5374 bool changed
= tree_purge_dead_eh_edges (bb
);
5376 if (current_function_has_nonlocal_label
)
5378 tree stmt
= last_stmt (bb
);
5382 if (!(stmt
&& tree_can_make_abnormal_goto (stmt
)))
5383 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
5385 if (e
->flags
& EDGE_ABNORMAL
)
5394 /* See tree_purge_dead_eh_edges below. */
5396 free_dominance_info (CDI_DOMINATORS
);
5402 /* Purge dead EH edges from basic block BB. */
5405 tree_purge_dead_eh_edges (basic_block bb
)
5407 bool changed
= false;
5410 tree stmt
= last_stmt (bb
);
5412 if (stmt
&& tree_can_throw_internal (stmt
))
5415 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
5417 if (e
->flags
& EDGE_EH
)
5426 /* Removal of dead EH edges might change dominators of not
5427 just immediate successors. E.g. when bb1 is changed so that
5428 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5429 eh edges purged by this function in:
5441 idom(bb5) must be recomputed. For now just free the dominance
5444 free_dominance_info (CDI_DOMINATORS
);
5450 tree_purge_all_dead_eh_edges (bitmap blocks
)
5452 bool changed
= false;
5456 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
5458 changed
|= tree_purge_dead_eh_edges (BASIC_BLOCK (i
));
5464 /* This function is called whenever a new edge is created or
5468 tree_execute_on_growing_pred (edge e
)
5470 basic_block bb
= e
->dest
;
5473 reserve_phi_args_for_new_edge (bb
);
5476 /* This function is called immediately before edge E is removed from
5477 the edge vector E->dest->preds. */
5480 tree_execute_on_shrinking_pred (edge e
)
5482 if (phi_nodes (e
->dest
))
5483 remove_phi_args (e
);
5486 /*---------------------------------------------------------------------------
5487 Helper functions for Loop versioning
5488 ---------------------------------------------------------------------------*/
5490 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
5491 of 'first'. Both of them are dominated by 'new_head' basic block. When
5492 'new_head' was created by 'second's incoming edge it received phi arguments
5493 on the edge by split_edge(). Later, additional edge 'e' was created to
5494 connect 'new_head' and 'first'. Now this routine adds phi args on this
5495 additional edge 'e' that new_head to second edge received as part of edge
5500 tree_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
5501 basic_block new_head
, edge e
)
5504 edge e2
= find_edge (new_head
, second
);
5506 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5507 edge, we should always have an edge from NEW_HEAD to SECOND. */
5508 gcc_assert (e2
!= NULL
);
5510 /* Browse all 'second' basic block phi nodes and add phi args to
5511 edge 'e' for 'first' head. PHI args are always in correct order. */
5513 for (phi2
= phi_nodes (second
), phi1
= phi_nodes (first
);
5515 phi2
= PHI_CHAIN (phi2
), phi1
= PHI_CHAIN (phi1
))
5517 tree def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
5518 add_phi_arg (phi1
, def
, e
);
5522 /* Adds a if else statement to COND_BB with condition COND_EXPR.
5523 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
5524 the destination of the ELSE part. */
5526 tree_lv_add_condition_to_bb (basic_block first_head
, basic_block second_head
,
5527 basic_block cond_bb
, void *cond_e
)
5529 block_stmt_iterator bsi
;
5530 tree goto1
= NULL_TREE
;
5531 tree goto2
= NULL_TREE
;
5532 tree new_cond_expr
= NULL_TREE
;
5533 tree cond_expr
= (tree
) cond_e
;
5536 /* Build new conditional expr */
5537 goto1
= build1 (GOTO_EXPR
, void_type_node
, tree_block_label (first_head
));
5538 goto2
= build1 (GOTO_EXPR
, void_type_node
, tree_block_label (second_head
));
5539 new_cond_expr
= build3 (COND_EXPR
, void_type_node
, cond_expr
, goto1
, goto2
);
5541 /* Add new cond in cond_bb. */
5542 bsi
= bsi_start (cond_bb
);
5543 bsi_insert_after (&bsi
, new_cond_expr
, BSI_NEW_STMT
);
5544 /* Adjust edges appropriately to connect new head with first head
5545 as well as second head. */
5546 e0
= single_succ_edge (cond_bb
);
5547 e0
->flags
&= ~EDGE_FALLTHRU
;
5548 e0
->flags
|= EDGE_FALSE_VALUE
;
5551 struct cfg_hooks tree_cfg_hooks
= {
5553 tree_verify_flow_info
,
5554 tree_dump_bb
, /* dump_bb */
5555 create_bb
, /* create_basic_block */
5556 tree_redirect_edge_and_branch
,/* redirect_edge_and_branch */
5557 tree_redirect_edge_and_branch_force
,/* redirect_edge_and_branch_force */
5558 tree_can_remove_branch_p
, /* can_remove_branch_p */
5559 remove_bb
, /* delete_basic_block */
5560 tree_split_block
, /* split_block */
5561 tree_move_block_after
, /* move_block_after */
5562 tree_can_merge_blocks_p
, /* can_merge_blocks_p */
5563 tree_merge_blocks
, /* merge_blocks */
5564 tree_predict_edge
, /* predict_edge */
5565 tree_predicted_by_p
, /* predicted_by_p */
5566 tree_can_duplicate_bb_p
, /* can_duplicate_block_p */
5567 tree_duplicate_bb
, /* duplicate_block */
5568 tree_split_edge
, /* split_edge */
5569 tree_make_forwarder_block
, /* make_forward_block */
5570 NULL
, /* tidy_fallthru_edge */
5571 tree_block_ends_with_call_p
, /* block_ends_with_call_p */
5572 tree_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
5573 tree_flow_call_edges_add
, /* flow_call_edges_add */
5574 tree_execute_on_growing_pred
, /* execute_on_growing_pred */
5575 tree_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
5576 tree_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
5577 tree_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
5578 tree_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
5579 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
5580 flush_pending_stmts
/* flush_pending_stmts */
5584 /* Split all critical edges. */
5587 split_critical_edges (void)
5593 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5594 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5595 mappings around the calls to split_edge. */
5596 start_recording_case_labels ();
5599 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5600 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
5605 end_recording_case_labels ();
5609 struct tree_opt_pass pass_split_crit_edges
=
5611 "crited", /* name */
5613 split_critical_edges
, /* execute */
5616 0, /* static_pass_number */
5617 TV_TREE_SPLIT_EDGES
, /* tv_id */
5618 PROP_cfg
, /* properties required */
5619 PROP_no_crit_edges
, /* properties_provided */
5620 0, /* properties_destroyed */
5621 0, /* todo_flags_start */
5622 TODO_dump_func
, /* todo_flags_finish */
5627 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5628 a temporary, make sure and register it to be renamed if necessary,
5629 and finally return the temporary. Put the statements to compute
5630 EXP before the current statement in BSI. */
5633 gimplify_val (block_stmt_iterator
*bsi
, tree type
, tree exp
)
5635 tree t
, new_stmt
, orig_stmt
;
5637 if (is_gimple_val (exp
))
5640 t
= make_rename_temp (type
, NULL
);
5641 new_stmt
= build2_gimple (GIMPLE_MODIFY_STMT
, t
, exp
);
5643 orig_stmt
= bsi_stmt (*bsi
);
5644 SET_EXPR_LOCUS (new_stmt
, EXPR_LOCUS (orig_stmt
));
5645 TREE_BLOCK (new_stmt
) = TREE_BLOCK (orig_stmt
);
5647 bsi_insert_before (bsi
, new_stmt
, BSI_SAME_STMT
);
5648 if (gimple_in_ssa_p (cfun
))
5649 mark_symbols_for_renaming (new_stmt
);
5654 /* Build a ternary operation and gimplify it. Emit code before BSI.
5655 Return the gimple_val holding the result. */
5658 gimplify_build3 (block_stmt_iterator
*bsi
, enum tree_code code
,
5659 tree type
, tree a
, tree b
, tree c
)
5663 ret
= fold_build3 (code
, type
, a
, b
, c
);
5666 return gimplify_val (bsi
, type
, ret
);
5669 /* Build a binary operation and gimplify it. Emit code before BSI.
5670 Return the gimple_val holding the result. */
5673 gimplify_build2 (block_stmt_iterator
*bsi
, enum tree_code code
,
5674 tree type
, tree a
, tree b
)
5678 ret
= fold_build2 (code
, type
, a
, b
);
5681 return gimplify_val (bsi
, type
, ret
);
5684 /* Build a unary operation and gimplify it. Emit code before BSI.
5685 Return the gimple_val holding the result. */
5688 gimplify_build1 (block_stmt_iterator
*bsi
, enum tree_code code
, tree type
,
5693 ret
= fold_build1 (code
, type
, a
);
5696 return gimplify_val (bsi
, type
, ret
);
5701 /* Emit return warnings. */
5704 execute_warn_function_return (void)
5706 #ifdef USE_MAPPED_LOCATION
5707 source_location location
;
5715 /* If we have a path to EXIT, then we do return. */
5716 if (TREE_THIS_VOLATILE (cfun
->decl
)
5717 && EDGE_COUNT (EXIT_BLOCK_PTR
->preds
) > 0)
5719 #ifdef USE_MAPPED_LOCATION
5720 location
= UNKNOWN_LOCATION
;
5724 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
5726 last
= last_stmt (e
->src
);
5727 if (TREE_CODE (last
) == RETURN_EXPR
5728 #ifdef USE_MAPPED_LOCATION
5729 && (location
= EXPR_LOCATION (last
)) != UNKNOWN_LOCATION
)
5731 && (locus
= EXPR_LOCUS (last
)) != NULL
)
5735 #ifdef USE_MAPPED_LOCATION
5736 if (location
== UNKNOWN_LOCATION
)
5737 location
= cfun
->function_end_locus
;
5738 warning (0, "%H%<noreturn%> function does return", &location
);
5741 locus
= &cfun
->function_end_locus
;
5742 warning (0, "%H%<noreturn%> function does return", locus
);
5746 /* If we see "return;" in some basic block, then we do reach the end
5747 without returning a value. */
5748 else if (warn_return_type
5749 && !TREE_NO_WARNING (cfun
->decl
)
5750 && EDGE_COUNT (EXIT_BLOCK_PTR
->preds
) > 0
5751 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun
->decl
))))
5753 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
5755 tree last
= last_stmt (e
->src
);
5756 if (TREE_CODE (last
) == RETURN_EXPR
5757 && TREE_OPERAND (last
, 0) == NULL
5758 && !TREE_NO_WARNING (last
))
5760 #ifdef USE_MAPPED_LOCATION
5761 location
= EXPR_LOCATION (last
);
5762 if (location
== UNKNOWN_LOCATION
)
5763 location
= cfun
->function_end_locus
;
5764 warning (0, "%Hcontrol reaches end of non-void function", &location
);
5766 locus
= EXPR_LOCUS (last
);
5768 locus
= &cfun
->function_end_locus
;
5769 warning (0, "%Hcontrol reaches end of non-void function", locus
);
5771 TREE_NO_WARNING (cfun
->decl
) = 1;
5780 /* Given a basic block B which ends with a conditional and has
5781 precisely two successors, determine which of the edges is taken if
5782 the conditional is true and which is taken if the conditional is
5783 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5786 extract_true_false_edges_from_block (basic_block b
,
5790 edge e
= EDGE_SUCC (b
, 0);
5792 if (e
->flags
& EDGE_TRUE_VALUE
)
5795 *false_edge
= EDGE_SUCC (b
, 1);
5800 *true_edge
= EDGE_SUCC (b
, 1);
5804 struct tree_opt_pass pass_warn_function_return
=
5808 execute_warn_function_return
, /* execute */
5811 0, /* static_pass_number */
5813 PROP_cfg
, /* properties_required */
5814 0, /* properties_provided */
5815 0, /* properties_destroyed */
5816 0, /* todo_flags_start */
5817 0, /* todo_flags_finish */
5821 /* Emit noreturn warnings. */
5824 execute_warn_function_noreturn (void)
5826 if (warn_missing_noreturn
5827 && !TREE_THIS_VOLATILE (cfun
->decl
)
5828 && EDGE_COUNT (EXIT_BLOCK_PTR
->preds
) == 0
5829 && !lang_hooks
.function
.missing_noreturn_ok_p (cfun
->decl
))
5830 warning (OPT_Wmissing_noreturn
, "%Jfunction might be possible candidate "
5831 "for attribute %<noreturn%>",
5836 struct tree_opt_pass pass_warn_function_noreturn
=
5840 execute_warn_function_noreturn
, /* execute */
5843 0, /* static_pass_number */
5845 PROP_cfg
, /* properties_required */
5846 0, /* properties_provided */
5847 0, /* properties_destroyed */
5848 0, /* todo_flags_start */
5849 0, /* todo_flags_finish */