1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
45 #include "cfglayout.h"
46 #include "tree-ssa-propagate.h"
47 #include "value-prof.h"
48 #include "pointer-set.h"
49 #include "tree-inline.h"
51 /* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
54 /* Local declarations. */
/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of SWITCH_EXPRs.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
static struct pointer_map_t *edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;
85 /* Basic blocks and flowgraphs. */
86 static basic_block
create_bb (void *, void *, basic_block
);
87 static void make_blocks (tree
);
88 static void factor_computed_gotos (void);
91 static void make_edges (void);
92 static void make_cond_expr_edges (basic_block
);
93 static void make_switch_expr_edges (basic_block
);
94 static void make_goto_expr_edges (basic_block
);
95 static edge
tree_redirect_edge_and_branch (edge
, basic_block
);
96 static edge
tree_try_redirect_by_replacing_jump (edge
, basic_block
);
97 static unsigned int split_critical_edges (void);
99 /* Various helpers. */
100 static inline bool stmt_starts_bb_p (const_tree
, const_tree
);
101 static int tree_verify_flow_info (void);
102 static void tree_make_forwarder_block (edge
);
103 static void tree_cfg2vcg (FILE *);
104 static inline void change_bb_for_stmt (tree t
, basic_block bb
);
105 static bool computed_goto_p (const_tree
);
107 /* Flowgraph optimization and cleanup. */
108 static void tree_merge_blocks (basic_block
, basic_block
);
109 static bool tree_can_merge_blocks_p (basic_block
, basic_block
);
110 static void remove_bb (basic_block
);
111 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
112 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
113 static edge
find_taken_edge_switch_expr (basic_block
, tree
);
114 static tree
find_case_label_for_value (tree
, tree
);
117 init_empty_tree_cfg_for_function (struct function
*fn
)
119 /* Initialize the basic block array. */
121 profile_status_for_function (fn
) = PROFILE_ABSENT
;
122 n_basic_blocks_for_function (fn
) = NUM_FIXED_BLOCKS
;
123 last_basic_block_for_function (fn
) = NUM_FIXED_BLOCKS
;
124 basic_block_info_for_function (fn
)
125 = VEC_alloc (basic_block
, gc
, initial_cfg_capacity
);
126 VEC_safe_grow_cleared (basic_block
, gc
,
127 basic_block_info_for_function (fn
),
128 initial_cfg_capacity
);
130 /* Build a mapping of labels to their associated blocks. */
131 label_to_block_map_for_function (fn
)
132 = VEC_alloc (basic_block
, gc
, initial_cfg_capacity
);
133 VEC_safe_grow_cleared (basic_block
, gc
,
134 label_to_block_map_for_function (fn
),
135 initial_cfg_capacity
);
137 SET_BASIC_BLOCK_FOR_FUNCTION (fn
, ENTRY_BLOCK
,
138 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn
));
139 SET_BASIC_BLOCK_FOR_FUNCTION (fn
, EXIT_BLOCK
,
140 EXIT_BLOCK_PTR_FOR_FUNCTION (fn
));
142 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn
)->next_bb
143 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn
);
144 EXIT_BLOCK_PTR_FOR_FUNCTION (fn
)->prev_bb
145 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn
);
149 init_empty_tree_cfg (void)
151 init_empty_tree_cfg_for_function (cfun
);
154 /*---------------------------------------------------------------------------
156 ---------------------------------------------------------------------------*/
158 /* Entry point to the CFG builder for trees. TP points to the list of
159 statements to be added to the flowgraph. */
162 build_tree_cfg (tree
*tp
)
164 /* Register specific tree functions. */
165 tree_register_cfg_hooks ();
167 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
169 init_empty_tree_cfg ();
171 found_computed_goto
= 0;
174 /* Computed gotos are hell to deal with, especially if there are
175 lots of them with a large number of destinations. So we factor
176 them to a common computed goto location before we build the
177 edge list. After we convert back to normal form, we will un-factor
178 the computed gotos since factoring introduces an unwanted jump. */
179 if (found_computed_goto
)
180 factor_computed_gotos ();
182 /* Make sure there is always at least one block, even if it's empty. */
183 if (n_basic_blocks
== NUM_FIXED_BLOCKS
)
184 create_empty_bb (ENTRY_BLOCK_PTR
);
186 /* Adjust the size of the array. */
187 if (VEC_length (basic_block
, basic_block_info
) < (size_t) n_basic_blocks
)
188 VEC_safe_grow_cleared (basic_block
, gc
, basic_block_info
, n_basic_blocks
);
190 /* To speed up statement iterator walks, we first purge dead labels. */
191 cleanup_dead_labels ();
193 /* Group case nodes to reduce the number of edges.
194 We do this after cleaning up dead labels because otherwise we miss
195 a lot of obvious case merging opportunities. */
196 group_case_labels ();
198 /* Create the edges of the flowgraph. */
200 cleanup_dead_labels ();
202 /* Debugging dumps. */
204 /* Write the flowgraph to a VCG file. */
206 int local_dump_flags
;
207 FILE *vcg_file
= dump_begin (TDI_vcg
, &local_dump_flags
);
210 tree_cfg2vcg (vcg_file
);
211 dump_end (TDI_vcg
, vcg_file
);
215 #ifdef ENABLE_CHECKING
219 /* Dump a textual representation of the flowgraph. */
221 dump_tree_cfg (dump_file
, dump_flags
);
225 execute_build_cfg (void)
227 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl
));
231 struct gimple_opt_pass pass_build_cfg
=
237 execute_build_cfg
, /* execute */
240 0, /* static_pass_number */
241 TV_TREE_CFG
, /* tv_id */
242 PROP_gimple_leh
, /* properties_required */
243 PROP_cfg
, /* properties_provided */
244 0, /* properties_destroyed */
245 0, /* todo_flags_start */
246 TODO_verify_stmts
| TODO_cleanup_cfg
/* todo_flags_finish */
250 /* Search the CFG for any computed gotos. If found, factor them to a
251 common computed goto site. Also record the location of that site so
252 that we can un-factor the gotos after we have converted back to
256 factor_computed_gotos (void)
259 tree factored_label_decl
= NULL
;
261 tree factored_computed_goto_label
= NULL
;
262 tree factored_computed_goto
= NULL
;
264 /* We know there are one or more computed gotos in this function.
265 Examine the last statement in each basic block to see if the block
266 ends with a computed goto. */
270 block_stmt_iterator bsi
= bsi_last (bb
);
275 last
= bsi_stmt (bsi
);
277 /* Ignore the computed goto we create when we factor the original
279 if (last
== factored_computed_goto
)
282 /* If the last statement is a computed goto, factor it. */
283 if (computed_goto_p (last
))
287 /* The first time we find a computed goto we need to create
288 the factored goto block and the variable each original
289 computed goto will use for their goto destination. */
290 if (! factored_computed_goto
)
292 basic_block new_bb
= create_empty_bb (bb
);
293 block_stmt_iterator new_bsi
= bsi_start (new_bb
);
295 /* Create the destination of the factored goto. Each original
296 computed goto will put its desired destination into this
297 variable and jump to the label we create immediately
299 var
= create_tmp_var (ptr_type_node
, "gotovar");
301 /* Build a label for the new block which will contain the
302 factored computed goto. */
303 factored_label_decl
= create_artificial_label ();
304 factored_computed_goto_label
305 = build1 (LABEL_EXPR
, void_type_node
, factored_label_decl
);
306 bsi_insert_after (&new_bsi
, factored_computed_goto_label
,
309 /* Build our new computed goto. */
310 factored_computed_goto
= build1 (GOTO_EXPR
, void_type_node
, var
);
311 bsi_insert_after (&new_bsi
, factored_computed_goto
,
315 /* Copy the original computed goto's destination into VAR. */
316 assignment
= build_gimple_modify_stmt (var
,
317 GOTO_DESTINATION (last
));
318 bsi_insert_before (&bsi
, assignment
, BSI_SAME_STMT
);
320 /* And re-vector the computed goto to the new destination. */
321 GOTO_DESTINATION (last
) = factored_label_decl
;
327 /* Build a flowgraph for the statement_list STMT_LIST. */
330 make_blocks (tree stmt_list
)
332 tree_stmt_iterator i
= tsi_start (stmt_list
);
334 bool start_new_block
= true;
335 bool first_stmt_of_list
= true;
336 basic_block bb
= ENTRY_BLOCK_PTR
;
338 while (!tsi_end_p (i
))
345 /* If the statement starts a new basic block or if we have determined
346 in a previous pass that we need to create a new block for STMT, do
348 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
350 if (!first_stmt_of_list
)
351 stmt_list
= tsi_split_statement_list_before (&i
);
352 bb
= create_basic_block (stmt_list
, NULL
, bb
);
353 start_new_block
= false;
356 /* Now add STMT to BB and create the subgraphs for special statement
358 set_bb_for_stmt (stmt
, bb
);
360 if (computed_goto_p (stmt
))
361 found_computed_goto
= true;
363 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
365 if (stmt_ends_bb_p (stmt
))
366 start_new_block
= true;
369 first_stmt_of_list
= false;
374 /* Create and return a new empty basic block after bb AFTER. */
377 create_bb (void *h
, void *e
, basic_block after
)
383 /* Create and initialize a new basic block. Since alloc_block uses
384 ggc_alloc_cleared to allocate a basic block, we do not have to
385 clear the newly allocated basic block here. */
388 bb
->index
= last_basic_block
;
390 bb
->il
.tree
= GGC_CNEW (struct tree_bb_info
);
391 set_bb_stmt_list (bb
, h
? (tree
) h
: alloc_stmt_list ());
393 /* Add the new block to the linked list of blocks. */
394 link_block (bb
, after
);
396 /* Grow the basic block array if needed. */
397 if ((size_t) last_basic_block
== VEC_length (basic_block
, basic_block_info
))
399 size_t new_size
= last_basic_block
+ (last_basic_block
+ 3) / 4;
400 VEC_safe_grow_cleared (basic_block
, gc
, basic_block_info
, new_size
);
403 /* Add the newly created block to the array. */
404 SET_BASIC_BLOCK (last_basic_block
, bb
);
413 /*---------------------------------------------------------------------------
415 ---------------------------------------------------------------------------*/
417 /* Fold COND_EXPR_COND of each COND_EXPR. */
420 fold_cond_expr_cond (void)
426 tree stmt
= last_stmt (bb
);
429 && TREE_CODE (stmt
) == COND_EXPR
)
434 fold_defer_overflow_warnings ();
435 cond
= fold (COND_EXPR_COND (stmt
));
436 zerop
= integer_zerop (cond
);
437 onep
= integer_onep (cond
);
438 fold_undefer_overflow_warnings (zerop
|| onep
,
440 WARN_STRICT_OVERFLOW_CONDITIONAL
);
442 COND_EXPR_COND (stmt
) = boolean_false_node
;
444 COND_EXPR_COND (stmt
) = boolean_true_node
;
449 /* Join all the blocks in the flowgraph. */
455 struct omp_region
*cur_region
= NULL
;
457 /* Create an edge from entry to the first block with executable
459 make_edge (ENTRY_BLOCK_PTR
, BASIC_BLOCK (NUM_FIXED_BLOCKS
), EDGE_FALLTHRU
);
461 /* Traverse the basic block array placing edges. */
464 tree last
= last_stmt (bb
);
469 enum tree_code code
= TREE_CODE (last
);
473 make_goto_expr_edges (bb
);
477 make_edge (bb
, EXIT_BLOCK_PTR
, 0);
481 make_cond_expr_edges (bb
);
485 make_switch_expr_edges (bb
);
489 make_eh_edges (last
);
494 /* If this function receives a nonlocal goto, then we need to
495 make edges from this call site to all the nonlocal goto
497 if (tree_can_make_abnormal_goto (last
))
498 make_abnormal_goto_edges (bb
, true);
500 /* If this statement has reachable exception handlers, then
501 create abnormal edges to them. */
502 make_eh_edges (last
);
504 /* Some calls are known not to return. */
505 fallthru
= !(call_expr_flags (last
) & ECF_NORETURN
);
511 case GIMPLE_MODIFY_STMT
:
512 if (is_ctrl_altering_stmt (last
))
514 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
515 the CALL_EXPR may have an abnormal edge. Search the RHS
516 for this case and create any required edges. */
517 if (tree_can_make_abnormal_goto (last
))
518 make_abnormal_goto_edges (bb
, true);
520 make_eh_edges (last
);
533 cur_region
= new_omp_region (bb
, code
, cur_region
);
538 cur_region
= new_omp_region (bb
, code
, cur_region
);
542 case OMP_SECTIONS_SWITCH
:
547 case OMP_ATOMIC_LOAD
:
548 case OMP_ATOMIC_STORE
:
554 /* In the case of an OMP_SECTION, the edge will go somewhere
555 other than the next block. This will be created later. */
556 cur_region
->exit
= bb
;
557 fallthru
= cur_region
->type
!= OMP_SECTION
;
558 cur_region
= cur_region
->outer
;
562 cur_region
->cont
= bb
;
563 switch (cur_region
->type
)
566 /* Mark all OMP_FOR and OMP_CONTINUE succs edges as abnormal
567 to prevent splitting them. */
568 single_succ_edge (cur_region
->entry
)->flags
|= EDGE_ABNORMAL
;
569 /* Make the loopback edge. */
570 make_edge (bb
, single_succ (cur_region
->entry
),
573 /* Create an edge from OMP_FOR to exit, which corresponds to
574 the case that the body of the loop is not executed at
576 make_edge (cur_region
->entry
, bb
->next_bb
, EDGE_ABNORMAL
);
577 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
| EDGE_ABNORMAL
);
582 /* Wire up the edges into and out of the nested sections. */
584 basic_block switch_bb
= single_succ (cur_region
->entry
);
586 struct omp_region
*i
;
587 for (i
= cur_region
->inner
; i
; i
= i
->next
)
589 gcc_assert (i
->type
== OMP_SECTION
);
590 make_edge (switch_bb
, i
->entry
, 0);
591 make_edge (i
->exit
, bb
, EDGE_FALLTHRU
);
594 /* Make the loopback edge to the block with
595 OMP_SECTIONS_SWITCH. */
596 make_edge (bb
, switch_bb
, 0);
598 /* Make the edge from the switch to exit. */
599 make_edge (switch_bb
, bb
->next_bb
, 0);
610 gcc_assert (!stmt_ends_bb_p (last
));
618 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
624 /* Fold COND_EXPR_COND of each COND_EXPR. */
625 fold_cond_expr_cond ();
629 /* Create the edges for a COND_EXPR starting at block BB.
630 At this point, both clauses must contain only simple gotos. */
633 make_cond_expr_edges (basic_block bb
)
635 tree entry
= last_stmt (bb
);
636 basic_block then_bb
, else_bb
;
637 tree then_label
, else_label
;
641 gcc_assert (TREE_CODE (entry
) == COND_EXPR
);
643 /* Entry basic blocks for each component. */
644 then_label
= GOTO_DESTINATION (COND_EXPR_THEN (entry
));
645 else_label
= GOTO_DESTINATION (COND_EXPR_ELSE (entry
));
646 then_bb
= label_to_block (then_label
);
647 else_bb
= label_to_block (else_label
);
649 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
650 e
->goto_locus
= EXPR_LOCATION (COND_EXPR_THEN (entry
));
651 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
653 e
->goto_locus
= EXPR_LOCATION (COND_EXPR_ELSE (entry
));
655 /* We do not need the gotos anymore. */
656 COND_EXPR_THEN (entry
) = NULL_TREE
;
657 COND_EXPR_ELSE (entry
) = NULL_TREE
;
661 /* Called for each element in the hash table (P) as we delete the
662 edge to cases hash table.
664 Clear all the TREE_CHAINs to prevent problems with copying of
665 SWITCH_EXPRs and structure sharing rules, then free the hash table
669 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED
, void **value
,
670 void *data ATTRIBUTE_UNUSED
)
674 for (t
= (tree
) *value
; t
; t
= next
)
676 next
= TREE_CHAIN (t
);
677 TREE_CHAIN (t
) = NULL
;
684 /* Start recording information mapping edges to case labels. */
687 start_recording_case_labels (void)
689 gcc_assert (edge_to_cases
== NULL
);
690 edge_to_cases
= pointer_map_create ();
693 /* Return nonzero if we are recording information for case labels. */
696 recording_case_labels_p (void)
698 return (edge_to_cases
!= NULL
);
701 /* Stop recording information mapping edges to case labels and
702 remove any information we have recorded. */
704 end_recording_case_labels (void)
706 pointer_map_traverse (edge_to_cases
, edge_to_cases_cleanup
, NULL
);
707 pointer_map_destroy (edge_to_cases
);
708 edge_to_cases
= NULL
;
711 /* If we are inside a {start,end}_recording_cases block, then return
712 a chain of CASE_LABEL_EXPRs from T which reference E.
714 Otherwise return NULL. */
717 get_cases_for_edge (edge e
, tree t
)
723 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
724 chains available. Return NULL so the caller can detect this case. */
725 if (!recording_case_labels_p ())
728 slot
= pointer_map_contains (edge_to_cases
, e
);
732 /* If we did not find E in the hash table, then this must be the first
733 time we have been queried for information about E & T. Add all the
734 elements from T to the hash table then perform the query again. */
736 vec
= SWITCH_LABELS (t
);
737 n
= TREE_VEC_LENGTH (vec
);
738 for (i
= 0; i
< n
; i
++)
740 tree elt
= TREE_VEC_ELT (vec
, i
);
741 tree lab
= CASE_LABEL (elt
);
742 basic_block label_bb
= label_to_block (lab
);
743 edge this_edge
= find_edge (e
->src
, label_bb
);
745 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
747 slot
= pointer_map_insert (edge_to_cases
, this_edge
);
748 TREE_CHAIN (elt
) = (tree
) *slot
;
752 return (tree
) *pointer_map_contains (edge_to_cases
, e
);
755 /* Create the edges for a SWITCH_EXPR starting at block BB.
756 At this point, the switch body has been lowered and the
757 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
760 make_switch_expr_edges (basic_block bb
)
762 tree entry
= last_stmt (bb
);
766 vec
= SWITCH_LABELS (entry
);
767 n
= TREE_VEC_LENGTH (vec
);
769 for (i
= 0; i
< n
; ++i
)
771 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
772 basic_block label_bb
= label_to_block (lab
);
773 make_edge (bb
, label_bb
, 0);
778 /* Return the basic block holding label DEST. */
781 label_to_block_fn (struct function
*ifun
, tree dest
)
783 int uid
= LABEL_DECL_UID (dest
);
785 /* We would die hard when faced by an undefined label. Emit a label to
786 the very first basic block. This will hopefully make even the dataflow
787 and undefined variable warnings quite right. */
788 if ((errorcount
|| sorrycount
) && uid
< 0)
790 block_stmt_iterator bsi
=
791 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS
));
794 stmt
= build1 (LABEL_EXPR
, void_type_node
, dest
);
795 bsi_insert_before (&bsi
, stmt
, BSI_NEW_STMT
);
796 uid
= LABEL_DECL_UID (dest
);
798 if (VEC_length (basic_block
, ifun
->cfg
->x_label_to_block_map
)
799 <= (unsigned int) uid
)
801 return VEC_index (basic_block
, ifun
->cfg
->x_label_to_block_map
, uid
);
804 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
805 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
808 make_abnormal_goto_edges (basic_block bb
, bool for_call
)
810 basic_block target_bb
;
811 block_stmt_iterator bsi
;
813 FOR_EACH_BB (target_bb
)
814 for (bsi
= bsi_start (target_bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
816 tree target
= bsi_stmt (bsi
);
818 if (TREE_CODE (target
) != LABEL_EXPR
)
821 target
= LABEL_EXPR_LABEL (target
);
823 /* Make an edge to every label block that has been marked as a
824 potential target for a computed goto or a non-local goto. */
825 if ((FORCED_LABEL (target
) && !for_call
)
826 || (DECL_NONLOCAL (target
) && for_call
))
828 make_edge (bb
, target_bb
, EDGE_ABNORMAL
);
834 /* Create edges for a goto statement at block BB. */
837 make_goto_expr_edges (basic_block bb
)
839 block_stmt_iterator last
= bsi_last (bb
);
840 tree goto_t
= bsi_stmt (last
);
842 /* A simple GOTO creates normal edges. */
843 if (simple_goto_p (goto_t
))
845 tree dest
= GOTO_DESTINATION (goto_t
);
846 edge e
= make_edge (bb
, label_to_block (dest
), EDGE_FALLTHRU
);
847 e
->goto_locus
= EXPR_LOCATION (goto_t
);
848 bsi_remove (&last
, true);
852 /* A computed GOTO creates abnormal edges. */
853 make_abnormal_goto_edges (bb
, false);
857 /*---------------------------------------------------------------------------
859 ---------------------------------------------------------------------------*/
861 /* Cleanup useless labels in basic blocks. This is something we wish
862 to do early because it allows us to group case labels before creating
863 the edges for the CFG, and it speeds up block statement iterators in
865 We rerun this pass after CFG is created, to get rid of the labels that
866 are no longer referenced. After then we do not run it any more, since
867 (almost) no new labels should be created. */
869 /* A map from basic block index to the leading label of that block. */
870 static struct label_record
875 /* True if the label is referenced from somewhere. */
879 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
881 update_eh_label (struct eh_region
*region
)
883 tree old_label
= get_eh_region_tree_label (region
);
887 basic_block bb
= label_to_block (old_label
);
889 /* ??? After optimizing, there may be EH regions with labels
890 that have already been removed from the function body, so
891 there is no basic block for them. */
895 new_label
= label_for_bb
[bb
->index
].label
;
896 label_for_bb
[bb
->index
].used
= true;
897 set_eh_region_tree_label (region
, new_label
);
901 /* Given LABEL return the first label in the same basic block. */
903 main_block_label (tree label
)
905 basic_block bb
= label_to_block (label
);
906 tree main_label
= label_for_bb
[bb
->index
].label
;
908 /* label_to_block possibly inserted undefined label into the chain. */
911 label_for_bb
[bb
->index
].label
= label
;
915 label_for_bb
[bb
->index
].used
= true;
919 /* Cleanup redundant labels. This is a three-step process:
920 1) Find the leading label for each block.
921 2) Redirect all references to labels to the leading labels.
922 3) Cleanup all useless labels. */
925 cleanup_dead_labels (void)
928 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block
);
930 /* Find a suitable label for each block. We use the first user-defined
931 label if there is one, or otherwise just the first label we see. */
934 block_stmt_iterator i
;
936 for (i
= bsi_start (bb
); !bsi_end_p (i
); bsi_next (&i
))
938 tree label
, stmt
= bsi_stmt (i
);
940 if (TREE_CODE (stmt
) != LABEL_EXPR
)
943 label
= LABEL_EXPR_LABEL (stmt
);
945 /* If we have not yet seen a label for the current block,
946 remember this one and see if there are more labels. */
947 if (!label_for_bb
[bb
->index
].label
)
949 label_for_bb
[bb
->index
].label
= label
;
953 /* If we did see a label for the current block already, but it
954 is an artificially created label, replace it if the current
955 label is a user defined label. */
956 if (!DECL_ARTIFICIAL (label
)
957 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
959 label_for_bb
[bb
->index
].label
= label
;
965 /* Now redirect all jumps/branches to the selected label.
966 First do so for each block ending in a control statement. */
969 tree stmt
= last_stmt (bb
);
973 switch (TREE_CODE (stmt
))
977 tree true_branch
, false_branch
;
979 true_branch
= COND_EXPR_THEN (stmt
);
980 false_branch
= COND_EXPR_ELSE (stmt
);
983 GOTO_DESTINATION (true_branch
)
984 = main_block_label (GOTO_DESTINATION (true_branch
));
986 GOTO_DESTINATION (false_branch
)
987 = main_block_label (GOTO_DESTINATION (false_branch
));
995 tree vec
= SWITCH_LABELS (stmt
);
996 size_t n
= TREE_VEC_LENGTH (vec
);
998 /* Replace all destination labels. */
999 for (i
= 0; i
< n
; ++i
)
1001 tree elt
= TREE_VEC_ELT (vec
, i
);
1002 tree label
= main_block_label (CASE_LABEL (elt
));
1003 CASE_LABEL (elt
) = label
;
1008 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1009 remove them until after we've created the CFG edges. */
1011 if (! computed_goto_p (stmt
))
1013 GOTO_DESTINATION (stmt
)
1014 = main_block_label (GOTO_DESTINATION (stmt
));
1023 for_each_eh_region (update_eh_label
);
1025 /* Finally, purge dead labels. All user-defined labels and labels that
1026 can be the target of non-local gotos and labels which have their
1027 address taken are preserved. */
1030 block_stmt_iterator i
;
1031 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1033 if (!label_for_this_bb
)
1036 /* If the main label of the block is unused, we may still remove it. */
1037 if (!label_for_bb
[bb
->index
].used
)
1038 label_for_this_bb
= NULL
;
1040 for (i
= bsi_start (bb
); !bsi_end_p (i
); )
1042 tree label
, stmt
= bsi_stmt (i
);
1044 if (TREE_CODE (stmt
) != LABEL_EXPR
)
1047 label
= LABEL_EXPR_LABEL (stmt
);
1049 if (label
== label_for_this_bb
1050 || ! DECL_ARTIFICIAL (label
)
1051 || DECL_NONLOCAL (label
)
1052 || FORCED_LABEL (label
))
1055 bsi_remove (&i
, true);
1059 free (label_for_bb
);
1062 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1063 and scan the sorted vector of cases. Combine the ones jumping to the
1065 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1068 group_case_labels (void)
1074 tree stmt
= last_stmt (bb
);
1075 if (stmt
&& TREE_CODE (stmt
) == SWITCH_EXPR
)
1077 tree labels
= SWITCH_LABELS (stmt
);
1078 int old_size
= TREE_VEC_LENGTH (labels
);
1079 int i
, j
, new_size
= old_size
;
1080 tree default_case
= NULL_TREE
;
1081 tree default_label
= NULL_TREE
;
1083 /* The default label is always the last case in a switch
1084 statement after gimplification if it was not optimized
1086 if (!CASE_LOW (TREE_VEC_ELT (labels
, old_size
- 1))
1087 && !CASE_HIGH (TREE_VEC_ELT (labels
, old_size
- 1)))
1089 default_case
= TREE_VEC_ELT (labels
, old_size
- 1);
1090 default_label
= CASE_LABEL (default_case
);
1094 /* Look for possible opportunities to merge cases. */
1096 while (i
< old_size
)
1098 tree base_case
, base_label
, base_high
;
1099 base_case
= TREE_VEC_ELT (labels
, i
);
1101 gcc_assert (base_case
);
1102 base_label
= CASE_LABEL (base_case
);
1104 /* Discard cases that have the same destination as the
1106 if (base_label
== default_label
)
1108 TREE_VEC_ELT (labels
, i
) = NULL_TREE
;
1114 base_high
= CASE_HIGH (base_case
) ?
1115 CASE_HIGH (base_case
) : CASE_LOW (base_case
);
1117 /* Try to merge case labels. Break out when we reach the end
1118 of the label vector or when we cannot merge the next case
1119 label with the current one. */
1120 while (i
< old_size
)
1122 tree merge_case
= TREE_VEC_ELT (labels
, i
);
1123 tree merge_label
= CASE_LABEL (merge_case
);
1124 tree t
= int_const_binop (PLUS_EXPR
, base_high
,
1125 integer_one_node
, 1);
1127 /* Merge the cases if they jump to the same place,
1128 and their ranges are consecutive. */
1129 if (merge_label
== base_label
1130 && tree_int_cst_equal (CASE_LOW (merge_case
), t
))
1132 base_high
= CASE_HIGH (merge_case
) ?
1133 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1134 CASE_HIGH (base_case
) = base_high
;
1135 TREE_VEC_ELT (labels
, i
) = NULL_TREE
;
1144 /* Compress the case labels in the label vector, and adjust the
1145 length of the vector. */
1146 for (i
= 0, j
= 0; i
< new_size
; i
++)
1148 while (! TREE_VEC_ELT (labels
, j
))
1150 TREE_VEC_ELT (labels
, i
) = TREE_VEC_ELT (labels
, j
++);
1152 TREE_VEC_LENGTH (labels
) = new_size
;
1157 /* Checks whether we can merge block B into block A. */
1160 tree_can_merge_blocks_p (basic_block a
, basic_block b
)
1163 block_stmt_iterator bsi
;
1166 if (!single_succ_p (a
))
1169 if (single_succ_edge (a
)->flags
& EDGE_ABNORMAL
)
1172 if (single_succ (a
) != b
)
1175 if (!single_pred_p (b
))
1178 if (b
== EXIT_BLOCK_PTR
)
1181 /* If A ends by a statement causing exceptions or something similar, we
1182 cannot merge the blocks. */
1183 /* This CONST_CAST is okay because last_stmt doesn't modify its
1184 argument and the return value is assign to a const_tree. */
1185 stmt
= last_stmt (CONST_CAST_BB (a
));
1186 if (stmt
&& stmt_ends_bb_p (stmt
))
1189 /* Do not allow a block with only a non-local label to be merged. */
1190 if (stmt
&& TREE_CODE (stmt
) == LABEL_EXPR
1191 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
)))
1194 /* It must be possible to eliminate all phi nodes in B. If ssa form
1195 is not up-to-date, we cannot eliminate any phis; however, if only
1196 some symbols as whole are marked for renaming, this is not a problem,
1197 as phi nodes for those symbols are irrelevant in updating anyway. */
1198 phi
= phi_nodes (b
);
1201 if (name_mappings_registered_p ())
1204 for (; phi
; phi
= PHI_CHAIN (phi
))
1205 if (!is_gimple_reg (PHI_RESULT (phi
))
1206 && !may_propagate_copy (PHI_RESULT (phi
), PHI_ARG_DEF (phi
, 0)))
1210 /* Do not remove user labels. */
1211 for (bsi
= bsi_start (b
); !bsi_end_p (bsi
); bsi_next (&bsi
))
1213 stmt
= bsi_stmt (bsi
);
1214 if (TREE_CODE (stmt
) != LABEL_EXPR
)
1216 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt
)))
1220 /* Protect the loop latches. */
1222 && b
->loop_father
->latch
== b
)
1228 /* Replaces all uses of NAME by VAL. */
1231 replace_uses_by (tree name
, tree val
)
1233 imm_use_iterator imm_iter
;
1238 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1240 if (TREE_CODE (stmt
) != PHI_NODE
)
1241 push_stmt_changes (&stmt
);
1243 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1245 replace_exp (use
, val
);
1247 if (TREE_CODE (stmt
) == PHI_NODE
)
1249 e
= PHI_ARG_EDGE (stmt
, PHI_ARG_INDEX_FROM_USE (use
));
1250 if (e
->flags
& EDGE_ABNORMAL
)
1252 /* This can only occur for virtual operands, since
1253 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1254 would prevent replacement. */
1255 gcc_assert (!is_gimple_reg (name
));
1256 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1261 if (TREE_CODE (stmt
) != PHI_NODE
)
1265 fold_stmt_inplace (stmt
);
1266 if (cfgcleanup_altered_bbs
)
1267 bitmap_set_bit (cfgcleanup_altered_bbs
, bb_for_stmt (stmt
)->index
);
1269 /* FIXME. This should go in pop_stmt_changes. */
1270 rhs
= get_rhs (stmt
);
1271 if (TREE_CODE (rhs
) == ADDR_EXPR
)
1272 recompute_tree_invariant_for_addr_expr (rhs
);
1274 maybe_clean_or_replace_eh_stmt (stmt
, stmt
);
1276 pop_stmt_changes (&stmt
);
1280 gcc_assert (has_zero_uses (name
));
1282 /* Also update the trees stored in loop structures. */
1288 FOR_EACH_LOOP (li
, loop
, 0)
1290 substitute_in_loop_info (loop
, name
, val
);
1295 /* Merge block B into block A. */
1298 tree_merge_blocks (basic_block a
, basic_block b
)
1300 block_stmt_iterator bsi
;
1301 tree_stmt_iterator last
;
1305 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1307 /* Remove all single-valued PHI nodes from block B of the form
1308 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1310 for (phi
= phi_nodes (b
); phi
; phi
= phi_nodes (b
))
1312 tree def
= PHI_RESULT (phi
), use
= PHI_ARG_DEF (phi
, 0);
1314 bool may_replace_uses
= may_propagate_copy (def
, use
);
1316 /* In case we maintain loop closed ssa form, do not propagate arguments
1317 of loop exit phi nodes. */
1319 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1320 && is_gimple_reg (def
)
1321 && TREE_CODE (use
) == SSA_NAME
1322 && a
->loop_father
!= b
->loop_father
)
1323 may_replace_uses
= false;
1325 if (!may_replace_uses
)
1327 gcc_assert (is_gimple_reg (def
));
1329 /* Note that just emitting the copies is fine -- there is no problem
1330 with ordering of phi nodes. This is because A is the single
1331 predecessor of B, therefore results of the phi nodes cannot
1332 appear as arguments of the phi nodes. */
1333 copy
= build_gimple_modify_stmt (def
, use
);
1334 bsi_insert_after (&bsi
, copy
, BSI_NEW_STMT
);
1335 SSA_NAME_DEF_STMT (def
) = copy
;
1336 remove_phi_node (phi
, NULL
, false);
1340 /* If we deal with a PHI for virtual operands, we can simply
1341 propagate these without fussing with folding or updating
1343 if (!is_gimple_reg (def
))
1345 imm_use_iterator iter
;
1346 use_operand_p use_p
;
1349 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1350 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1351 SET_USE (use_p
, use
);
1354 replace_uses_by (def
, use
);
1355 remove_phi_node (phi
, NULL
, true);
1359 /* Ensure that B follows A. */
1360 move_block_after (b
, a
);
1362 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1363 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1365 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1366 for (bsi
= bsi_start (b
); !bsi_end_p (bsi
);)
1368 if (TREE_CODE (bsi_stmt (bsi
)) == LABEL_EXPR
)
1370 tree label
= bsi_stmt (bsi
);
1372 bsi_remove (&bsi
, false);
1373 /* Now that we can thread computed gotos, we might have
1374 a situation where we have a forced label in block B
1375 However, the label at the start of block B might still be
1376 used in other ways (think about the runtime checking for
1377 Fortran assigned gotos). So we can not just delete the
1378 label. Instead we move the label to the start of block A. */
1379 if (FORCED_LABEL (LABEL_EXPR_LABEL (label
)))
1381 block_stmt_iterator dest_bsi
= bsi_start (a
);
1382 bsi_insert_before (&dest_bsi
, label
, BSI_NEW_STMT
);
1387 change_bb_for_stmt (bsi_stmt (bsi
), a
);
1392 /* Merge the chains. */
1393 last
= tsi_last (bb_stmt_list (a
));
1394 tsi_link_after (&last
, bb_stmt_list (b
), TSI_NEW_STMT
);
1395 set_bb_stmt_list (b
, NULL_TREE
);
1397 if (cfgcleanup_altered_bbs
)
1398 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
1402 /* Return the one of two successors of BB that is not reachable by a
1403 reached by a complex edge, if there is one. Else, return BB. We use
1404 this in optimizations that use post-dominators for their heuristics,
1405 to catch the cases in C++ where function calls are involved. */
1408 single_noncomplex_succ (basic_block bb
)
1411 if (EDGE_COUNT (bb
->succs
) != 2)
1414 e0
= EDGE_SUCC (bb
, 0);
1415 e1
= EDGE_SUCC (bb
, 1);
1416 if (e0
->flags
& EDGE_COMPLEX
)
1418 if (e1
->flags
& EDGE_COMPLEX
)
1425 /* Walk the function tree removing unnecessary statements.
1427 * Empty statement nodes are removed
1429 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1431 * Unnecessary COND_EXPRs are removed
1433 * Some unnecessary BIND_EXPRs are removed
1435 Clearly more work could be done. The trick is doing the analysis
1436 and removal fast enough to be a net improvement in compile times.
1438 Note that when we remove a control structure such as a COND_EXPR
1439 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1440 to ensure we eliminate all the useless code. */
1451 static void remove_useless_stmts_1 (tree
*, struct rus_data
*);
1454 remove_useless_stmts_warn_notreached (tree stmt
)
1456 if (EXPR_HAS_LOCATION (stmt
))
1458 location_t loc
= EXPR_LOCATION (stmt
);
1459 if (LOCATION_LINE (loc
) > 0)
1461 warning (OPT_Wunreachable_code
, "%Hwill never be executed", &loc
);
1466 switch (TREE_CODE (stmt
))
1468 case STATEMENT_LIST
:
1470 tree_stmt_iterator i
;
1471 for (i
= tsi_start (stmt
); !tsi_end_p (i
); tsi_next (&i
))
1472 if (remove_useless_stmts_warn_notreached (tsi_stmt (i
)))
1478 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt
)))
1480 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt
)))
1482 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt
)))
1486 case TRY_FINALLY_EXPR
:
1487 case TRY_CATCH_EXPR
:
1488 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt
, 0)))
1490 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt
, 1)))
1495 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt
));
1496 case EH_FILTER_EXPR
:
1497 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt
));
1499 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt
));
1502 /* Not a live container. */
1510 remove_useless_stmts_cond (tree
*stmt_p
, struct rus_data
*data
)
1512 tree then_clause
, else_clause
, cond
;
1513 bool save_has_label
, then_has_label
, else_has_label
;
1515 save_has_label
= data
->has_label
;
1516 data
->has_label
= false;
1517 data
->last_goto
= NULL
;
1519 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p
), data
);
1521 then_has_label
= data
->has_label
;
1522 data
->has_label
= false;
1523 data
->last_goto
= NULL
;
1525 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p
), data
);
1527 else_has_label
= data
->has_label
;
1528 data
->has_label
= save_has_label
| then_has_label
| else_has_label
;
1530 then_clause
= COND_EXPR_THEN (*stmt_p
);
1531 else_clause
= COND_EXPR_ELSE (*stmt_p
);
1532 cond
= fold (COND_EXPR_COND (*stmt_p
));
1534 /* If neither arm does anything at all, we can remove the whole IF. */
1535 if (!TREE_SIDE_EFFECTS (then_clause
) && !TREE_SIDE_EFFECTS (else_clause
))
1537 *stmt_p
= build_empty_stmt ();
1538 data
->repeat
= true;
1541 /* If there are no reachable statements in an arm, then we can
1542 zap the entire conditional. */
1543 else if (integer_nonzerop (cond
) && !else_has_label
)
1545 if (warn_notreached
)
1546 remove_useless_stmts_warn_notreached (else_clause
);
1547 *stmt_p
= then_clause
;
1548 data
->repeat
= true;
1550 else if (integer_zerop (cond
) && !then_has_label
)
1552 if (warn_notreached
)
1553 remove_useless_stmts_warn_notreached (then_clause
);
1554 *stmt_p
= else_clause
;
1555 data
->repeat
= true;
1558 /* Check a couple of simple things on then/else with single stmts. */
1561 tree then_stmt
= expr_only (then_clause
);
1562 tree else_stmt
= expr_only (else_clause
);
1564 /* Notice branches to a common destination. */
1565 if (then_stmt
&& else_stmt
1566 && TREE_CODE (then_stmt
) == GOTO_EXPR
1567 && TREE_CODE (else_stmt
) == GOTO_EXPR
1568 && (GOTO_DESTINATION (then_stmt
) == GOTO_DESTINATION (else_stmt
)))
1570 *stmt_p
= then_stmt
;
1571 data
->repeat
= true;
1574 /* If the THEN/ELSE clause merely assigns a value to a variable or
1575 parameter which is already known to contain that value, then
1576 remove the useless THEN/ELSE clause. */
1577 else if (TREE_CODE (cond
) == VAR_DECL
|| TREE_CODE (cond
) == PARM_DECL
)
1580 && TREE_CODE (else_stmt
) == GIMPLE_MODIFY_STMT
1581 && GIMPLE_STMT_OPERAND (else_stmt
, 0) == cond
1582 && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt
, 1)))
1583 COND_EXPR_ELSE (*stmt_p
) = alloc_stmt_list ();
1585 else if ((TREE_CODE (cond
) == EQ_EXPR
|| TREE_CODE (cond
) == NE_EXPR
)
1586 && (TREE_CODE (TREE_OPERAND (cond
, 0)) == VAR_DECL
1587 || TREE_CODE (TREE_OPERAND (cond
, 0)) == PARM_DECL
)
1588 && TREE_CONSTANT (TREE_OPERAND (cond
, 1)))
1590 tree stmt
= (TREE_CODE (cond
) == EQ_EXPR
1591 ? then_stmt
: else_stmt
);
1592 tree
*location
= (TREE_CODE (cond
) == EQ_EXPR
1593 ? &COND_EXPR_THEN (*stmt_p
)
1594 : &COND_EXPR_ELSE (*stmt_p
));
1597 && TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
1598 && GIMPLE_STMT_OPERAND (stmt
, 0) == TREE_OPERAND (cond
, 0)
1599 && GIMPLE_STMT_OPERAND (stmt
, 1) == TREE_OPERAND (cond
, 1))
1600 *location
= alloc_stmt_list ();
1604 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1605 would be re-introduced during lowering. */
1606 data
->last_goto
= NULL
;
1611 remove_useless_stmts_tf (tree
*stmt_p
, struct rus_data
*data
)
1613 bool save_may_branch
, save_may_throw
;
1614 bool this_may_branch
, this_may_throw
;
1616 /* Collect may_branch and may_throw information for the body only. */
1617 save_may_branch
= data
->may_branch
;
1618 save_may_throw
= data
->may_throw
;
1619 data
->may_branch
= false;
1620 data
->may_throw
= false;
1621 data
->last_goto
= NULL
;
1623 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 0), data
);
1625 this_may_branch
= data
->may_branch
;
1626 this_may_throw
= data
->may_throw
;
1627 data
->may_branch
|= save_may_branch
;
1628 data
->may_throw
|= save_may_throw
;
1629 data
->last_goto
= NULL
;
1631 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 1), data
);
1633 /* If the body is empty, then we can emit the FINALLY block without
1634 the enclosing TRY_FINALLY_EXPR. */
1635 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p
, 0)))
1637 *stmt_p
= TREE_OPERAND (*stmt_p
, 1);
1638 data
->repeat
= true;
1641 /* If the handler is empty, then we can emit the TRY block without
1642 the enclosing TRY_FINALLY_EXPR. */
1643 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p
, 1)))
1645 *stmt_p
= TREE_OPERAND (*stmt_p
, 0);
1646 data
->repeat
= true;
1649 /* If the body neither throws, nor branches, then we can safely
1650 string the TRY and FINALLY blocks together. */
1651 else if (!this_may_branch
&& !this_may_throw
)
1653 tree stmt
= *stmt_p
;
1654 *stmt_p
= TREE_OPERAND (stmt
, 0);
1655 append_to_statement_list (TREE_OPERAND (stmt
, 1), stmt_p
);
1656 data
->repeat
= true;
1662 remove_useless_stmts_tc (tree
*stmt_p
, struct rus_data
*data
)
1664 bool save_may_throw
, this_may_throw
;
1665 tree_stmt_iterator i
;
1668 /* Collect may_throw information for the body only. */
1669 save_may_throw
= data
->may_throw
;
1670 data
->may_throw
= false;
1671 data
->last_goto
= NULL
;
1673 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 0), data
);
1675 this_may_throw
= data
->may_throw
;
1676 data
->may_throw
= save_may_throw
;
1678 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1679 if (!this_may_throw
)
1681 if (warn_notreached
)
1682 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p
, 1));
1683 *stmt_p
= TREE_OPERAND (*stmt_p
, 0);
1684 data
->repeat
= true;
1688 /* Process the catch clause specially. We may be able to tell that
1689 no exceptions propagate past this point. */
1691 this_may_throw
= true;
1692 i
= tsi_start (TREE_OPERAND (*stmt_p
, 1));
1693 stmt
= tsi_stmt (i
);
1694 data
->last_goto
= NULL
;
1696 switch (TREE_CODE (stmt
))
1699 for (; !tsi_end_p (i
); tsi_next (&i
))
1701 stmt
= tsi_stmt (i
);
1702 /* If we catch all exceptions, then the body does not
1703 propagate exceptions past this point. */
1704 if (CATCH_TYPES (stmt
) == NULL
)
1705 this_may_throw
= false;
1706 data
->last_goto
= NULL
;
1707 remove_useless_stmts_1 (&CATCH_BODY (stmt
), data
);
1711 case EH_FILTER_EXPR
:
1712 if (EH_FILTER_MUST_NOT_THROW (stmt
))
1713 this_may_throw
= false;
1714 else if (EH_FILTER_TYPES (stmt
) == NULL
)
1715 this_may_throw
= false;
1716 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt
), data
);
1720 /* Otherwise this is a cleanup. */
1721 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 1), data
);
1723 /* If the cleanup is empty, then we can emit the TRY block without
1724 the enclosing TRY_CATCH_EXPR. */
1725 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p
, 1)))
1727 *stmt_p
= TREE_OPERAND (*stmt_p
, 0);
1728 data
->repeat
= true;
1732 data
->may_throw
|= this_may_throw
;
1737 remove_useless_stmts_bind (tree
*stmt_p
, struct rus_data
*data
)
1741 /* First remove anything underneath the BIND_EXPR. */
1742 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p
), data
);
1744 /* If the BIND_EXPR has no variables, then we can pull everything
1745 up one level and remove the BIND_EXPR, unless this is the toplevel
1746 BIND_EXPR for the current function or an inlined function.
1748 When this situation occurs we will want to apply this
1749 optimization again. */
1750 block
= BIND_EXPR_BLOCK (*stmt_p
);
1751 if (BIND_EXPR_VARS (*stmt_p
) == NULL_TREE
1752 && *stmt_p
!= DECL_SAVED_TREE (current_function_decl
)
1754 || ! BLOCK_ABSTRACT_ORIGIN (block
)
1755 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block
))
1758 *stmt_p
= BIND_EXPR_BODY (*stmt_p
);
1759 data
->repeat
= true;
1765 remove_useless_stmts_goto (tree
*stmt_p
, struct rus_data
*data
)
1767 tree dest
= GOTO_DESTINATION (*stmt_p
);
1769 data
->may_branch
= true;
1770 data
->last_goto
= NULL
;
1772 /* Record the last goto expr, so that we can delete it if unnecessary. */
1773 if (TREE_CODE (dest
) == LABEL_DECL
)
1774 data
->last_goto
= stmt_p
;
1779 remove_useless_stmts_label (tree
*stmt_p
, struct rus_data
*data
)
1781 tree label
= LABEL_EXPR_LABEL (*stmt_p
);
1783 data
->has_label
= true;
1785 /* We do want to jump across non-local label receiver code. */
1786 if (DECL_NONLOCAL (label
))
1787 data
->last_goto
= NULL
;
1789 else if (data
->last_goto
&& GOTO_DESTINATION (*data
->last_goto
) == label
)
1791 *data
->last_goto
= build_empty_stmt ();
1792 data
->repeat
= true;
1795 /* ??? Add something here to delete unused labels. */
1799 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1800 decl. This allows us to eliminate redundant or useless
1801 calls to "const" functions.
1803 Gimplifier already does the same operation, but we may notice functions
1804 being const and pure once their calls has been gimplified, so we need
1805 to update the flag. */
1808 update_call_expr_flags (tree call
)
1810 tree decl
= get_callee_fndecl (call
);
1814 flags
= call_expr_flags (call
);
1815 if (flags
& (ECF_CONST
| ECF_PURE
) && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
1816 TREE_SIDE_EFFECTS (call
) = 0;
1817 if (TREE_NOTHROW (decl
))
1818 TREE_NOTHROW (call
) = 1;
1822 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1825 notice_special_calls (tree t
)
1827 int flags
= call_expr_flags (t
);
1829 if (flags
& ECF_MAY_BE_ALLOCA
)
1830 cfun
->calls_alloca
= true;
1831 if (flags
& ECF_RETURNS_TWICE
)
1832 cfun
->calls_setjmp
= true;
1836 /* Clear flags set by notice_special_calls. Used by dead code removal
1837 to update the flags. */
1840 clear_special_calls (void)
1842 cfun
->calls_alloca
= false;
1843 cfun
->calls_setjmp
= false;
1848 remove_useless_stmts_1 (tree
*tp
, struct rus_data
*data
)
1852 switch (TREE_CODE (t
))
1855 remove_useless_stmts_cond (tp
, data
);
1858 case TRY_FINALLY_EXPR
:
1859 remove_useless_stmts_tf (tp
, data
);
1862 case TRY_CATCH_EXPR
:
1863 remove_useless_stmts_tc (tp
, data
);
1867 remove_useless_stmts_bind (tp
, data
);
1871 remove_useless_stmts_goto (tp
, data
);
1875 remove_useless_stmts_label (tp
, data
);
1880 data
->last_goto
= NULL
;
1881 data
->may_branch
= true;
1886 data
->last_goto
= NULL
;
1887 notice_special_calls (t
);
1888 update_call_expr_flags (t
);
1889 if (tree_could_throw_p (t
))
1890 data
->may_throw
= true;
1896 case GIMPLE_MODIFY_STMT
:
1897 data
->last_goto
= NULL
;
1899 op
= get_call_expr_in (t
);
1902 update_call_expr_flags (op
);
1903 notice_special_calls (op
);
1905 if (tree_could_throw_p (t
))
1906 data
->may_throw
= true;
1909 case STATEMENT_LIST
:
1911 tree_stmt_iterator i
= tsi_start (t
);
1912 while (!tsi_end_p (i
))
1915 if (IS_EMPTY_STMT (t
))
1921 remove_useless_stmts_1 (tsi_stmt_ptr (i
), data
);
1924 if (TREE_CODE (t
) == STATEMENT_LIST
)
1926 tsi_link_before (&i
, t
, TSI_SAME_STMT
);
1936 data
->last_goto
= NULL
;
1941 /* Make sure the outermost BIND_EXPR in OMP_BODY isn't removed
1943 remove_useless_stmts_1 (&BIND_EXPR_BODY (OMP_TASKREG_BODY (*tp
)), data
);
1944 data
->last_goto
= NULL
;
1953 remove_useless_stmts_1 (&OMP_BODY (*tp
), data
);
1954 data
->last_goto
= NULL
;
1958 remove_useless_stmts_1 (&OMP_FOR_BODY (*tp
), data
);
1959 data
->last_goto
= NULL
;
1960 if (OMP_FOR_PRE_BODY (*tp
))
1962 remove_useless_stmts_1 (&OMP_FOR_PRE_BODY (*tp
), data
);
1963 data
->last_goto
= NULL
;
1968 data
->last_goto
= NULL
;
1974 remove_useless_stmts (void)
1976 struct rus_data data
;
1978 clear_special_calls ();
1982 memset (&data
, 0, sizeof (data
));
1983 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl
), &data
);
1985 while (data
.repeat
);
1990 struct gimple_opt_pass pass_remove_useless_stmts
=
1994 "useless", /* name */
1996 remove_useless_stmts
, /* execute */
1999 0, /* static_pass_number */
2001 PROP_gimple_any
, /* properties_required */
2002 0, /* properties_provided */
2003 0, /* properties_destroyed */
2004 0, /* todo_flags_start */
2005 TODO_dump_func
/* todo_flags_finish */
2009 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2012 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
2016 /* Since this block is no longer reachable, we can just delete all
2017 of its PHI nodes. */
2018 phi
= phi_nodes (bb
);
2021 tree next
= PHI_CHAIN (phi
);
2022 remove_phi_node (phi
, NULL_TREE
, true);
2026 /* Remove edges to BB's successors. */
2027 while (EDGE_COUNT (bb
->succs
) > 0)
2028 remove_edge (EDGE_SUCC (bb
, 0));
2032 /* Remove statements of basic block BB. */
2035 remove_bb (basic_block bb
)
2037 block_stmt_iterator i
;
2038 source_location loc
= UNKNOWN_LOCATION
;
2042 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
2043 if (dump_flags
& TDF_DETAILS
)
2045 dump_bb (bb
, dump_file
, 0);
2046 fprintf (dump_file
, "\n");
2052 struct loop
*loop
= bb
->loop_father
;
2054 /* If a loop gets removed, clean up the information associated
2056 if (loop
->latch
== bb
2057 || loop
->header
== bb
)
2058 free_numbers_of_iterations_estimates_loop (loop
);
2061 /* Remove all the instructions in the block. */
2062 if (bb_stmt_list (bb
) != NULL_TREE
)
2064 for (i
= bsi_start (bb
); !bsi_end_p (i
);)
2066 tree stmt
= bsi_stmt (i
);
2067 if (TREE_CODE (stmt
) == LABEL_EXPR
2068 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt
))
2069 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
))))
2072 block_stmt_iterator new_bsi
;
2074 /* A non-reachable non-local label may still be referenced.
2075 But it no longer needs to carry the extra semantics of
2077 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
)))
2079 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
)) = 0;
2080 FORCED_LABEL (LABEL_EXPR_LABEL (stmt
)) = 1;
2083 new_bb
= bb
->prev_bb
;
2084 new_bsi
= bsi_start (new_bb
);
2085 bsi_remove (&i
, false);
2086 bsi_insert_before (&new_bsi
, stmt
, BSI_NEW_STMT
);
2090 /* Release SSA definitions if we are in SSA. Note that we
2091 may be called when not in SSA. For example,
2092 final_cleanup calls this function via
2093 cleanup_tree_cfg. */
2094 if (gimple_in_ssa_p (cfun
))
2095 release_defs (stmt
);
2097 bsi_remove (&i
, true);
2100 /* Don't warn for removed gotos. Gotos are often removed due to
2101 jump threading, thus resulting in bogus warnings. Not great,
2102 since this way we lose warnings for gotos in the original
2103 program that are indeed unreachable. */
2104 if (TREE_CODE (stmt
) != GOTO_EXPR
&& EXPR_HAS_LOCATION (stmt
) && !loc
)
2106 if (EXPR_HAS_LOCATION (stmt
))
2107 loc
= EXPR_LOCATION (stmt
);
2112 /* If requested, give a warning that the first statement in the
2113 block is unreachable. We walk statements backwards in the
2114 loop above, so the last statement we process is the first statement
2116 if (loc
> BUILTINS_LOCATION
&& LOCATION_LINE (loc
) > 0)
2117 warning (OPT_Wunreachable_code
, "%Hwill never be executed", &loc
);
2119 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2124 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2125 predicate VAL, return the edge that will be taken out of the block.
2126 If VAL does not match a unique edge, NULL is returned. */
2129 find_taken_edge (basic_block bb
, tree val
)
2133 stmt
= last_stmt (bb
);
2136 gcc_assert (is_ctrl_stmt (stmt
));
2139 if (! is_gimple_min_invariant (val
))
2142 if (TREE_CODE (stmt
) == COND_EXPR
)
2143 return find_taken_edge_cond_expr (bb
, val
);
2145 if (TREE_CODE (stmt
) == SWITCH_EXPR
)
2146 return find_taken_edge_switch_expr (bb
, val
);
2148 if (computed_goto_p (stmt
))
2150 /* Only optimize if the argument is a label, if the argument is
2151 not a label then we can not construct a proper CFG.
2153 It may be the case that we only need to allow the LABEL_REF to
2154 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2155 appear inside a LABEL_EXPR just to be safe. */
2156 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2157 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2158 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
2165 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2166 statement, determine which of the outgoing edges will be taken out of the
2167 block. Return NULL if either edge may be taken. */
2170 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2175 dest
= label_to_block (val
);
2178 e
= find_edge (bb
, dest
);
2179 gcc_assert (e
!= NULL
);
2185 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2186 statement, determine which of the two edges will be taken out of the
2187 block. Return NULL if either edge may be taken. */
2190 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2192 edge true_edge
, false_edge
;
2194 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2196 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2197 return (integer_zerop (val
) ? false_edge
: true_edge
);
2200 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2201 statement, determine which edge will be taken out of the block. Return
2202 NULL if any edge may be taken. */
2205 find_taken_edge_switch_expr (basic_block bb
, tree val
)
2207 tree switch_expr
, taken_case
;
2208 basic_block dest_bb
;
2211 switch_expr
= last_stmt (bb
);
2212 taken_case
= find_case_label_for_value (switch_expr
, val
);
2213 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2215 e
= find_edge (bb
, dest_bb
);
2221 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2222 We can make optimal use here of the fact that the case labels are
2223 sorted: We can do a binary search for a case matching VAL. */
2226 find_case_label_for_value (tree switch_expr
, tree val
)
2228 tree vec
= SWITCH_LABELS (switch_expr
);
2229 size_t low
, high
, n
= TREE_VEC_LENGTH (vec
);
2230 tree default_case
= TREE_VEC_ELT (vec
, n
- 1);
2232 for (low
= -1, high
= n
- 1; high
- low
> 1; )
2234 size_t i
= (high
+ low
) / 2;
2235 tree t
= TREE_VEC_ELT (vec
, i
);
2238 /* Cache the result of comparing CASE_LOW and val. */
2239 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2246 if (CASE_HIGH (t
) == NULL
)
2248 /* A singe-valued case label. */
2254 /* A case range. We can only handle integer ranges. */
2255 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2260 return default_case
;
2266 /*---------------------------------------------------------------------------
2268 ---------------------------------------------------------------------------*/
2270 /* Dump tree-specific information of block BB to file OUTF. */
2273 tree_dump_bb (basic_block bb
, FILE *outf
, int indent
)
2275 dump_generic_bb (outf
, bb
, indent
, TDF_VOPS
|TDF_MEMSYMS
);
2279 /* Dump a basic block on stderr. */
2282 debug_tree_bb (basic_block bb
)
2284 dump_bb (bb
, stderr
, 0);
2288 /* Dump basic block with index N on stderr. */
2291 debug_tree_bb_n (int n
)
2293 debug_tree_bb (BASIC_BLOCK (n
));
2294 return BASIC_BLOCK (n
);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in tree-pass.h).  */

void
debug_tree_cfg (int flags)
{
  dump_tree_cfg (stderr, flags);
}
2310 /* Dump the program showing basic block boundaries on the given FILE.
2312 FLAGS are the same used by the tree dumping functions (see TDF_* in
2316 dump_tree_cfg (FILE *file
, int flags
)
2318 if (flags
& TDF_DETAILS
)
2320 const char *funcname
2321 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
2324 fprintf (file
, ";; Function %s\n\n", funcname
);
2325 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2326 n_basic_blocks
, n_edges
, last_basic_block
);
2328 brief_dump_cfg (file
);
2329 fprintf (file
, "\n");
2332 if (flags
& TDF_STATS
)
2333 dump_cfg_stats (file
);
2335 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2339 /* Dump CFG statistics on FILE. */
2342 dump_cfg_stats (FILE *file
)
2344 static long max_num_merged_labels
= 0;
2345 unsigned long size
, total
= 0;
2348 const char * const fmt_str
= "%-30s%-13s%12s\n";
2349 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2350 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2351 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2352 const char *funcname
2353 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
2356 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2358 fprintf (file
, "---------------------------------------------------------\n");
2359 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2360 fprintf (file
, fmt_str
, "", " instances ", "used ");
2361 fprintf (file
, "---------------------------------------------------------\n");
2363 size
= n_basic_blocks
* sizeof (struct basic_block_def
);
2365 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks
,
2366 SCALE (size
), LABEL (size
));
2370 num_edges
+= EDGE_COUNT (bb
->succs
);
2371 size
= num_edges
* sizeof (struct edge_def
);
2373 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2375 fprintf (file
, "---------------------------------------------------------\n");
2376 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2378 fprintf (file
, "---------------------------------------------------------\n");
2379 fprintf (file
, "\n");
2381 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2382 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2384 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2385 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2387 fprintf (file
, "\n");
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2401 /* Dump the flowgraph to a .vcg FILE. */
2404 tree_cfg2vcg (FILE *file
)
2409 const char *funcname
2410 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
2412 /* Write the file header. */
2413 fprintf (file
, "graph: { title: \"%s\"\n", funcname
);
2414 fprintf (file
, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2415 fprintf (file
, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2417 /* Write blocks and edges. */
2418 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR
->succs
)
2420 fprintf (file
, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2423 if (e
->flags
& EDGE_FAKE
)
2424 fprintf (file
, " linestyle: dotted priority: 10");
2426 fprintf (file
, " linestyle: solid priority: 100");
2428 fprintf (file
, " }\n");
2434 enum tree_code head_code
, end_code
;
2435 const char *head_name
, *end_name
;
2438 tree first
= first_stmt (bb
);
2439 tree last
= last_stmt (bb
);
2443 head_code
= TREE_CODE (first
);
2444 head_name
= tree_code_name
[head_code
];
2445 head_line
= get_lineno (first
);
2448 head_name
= "no-statement";
2452 end_code
= TREE_CODE (last
);
2453 end_name
= tree_code_name
[end_code
];
2454 end_line
= get_lineno (last
);
2457 end_name
= "no-statement";
2459 fprintf (file
, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2460 bb
->index
, bb
->index
, head_name
, head_line
, end_name
,
2463 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2465 if (e
->dest
== EXIT_BLOCK_PTR
)
2466 fprintf (file
, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb
->index
);
2468 fprintf (file
, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb
->index
, e
->dest
->index
);
2470 if (e
->flags
& EDGE_FAKE
)
2471 fprintf (file
, " priority: 10 linestyle: dotted");
2473 fprintf (file
, " priority: 100 linestyle: solid");
2475 fprintf (file
, " }\n");
2478 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
2482 fputs ("}\n\n", file
);
2487 /*---------------------------------------------------------------------------
2488 Miscellaneous helpers
2489 ---------------------------------------------------------------------------*/
2491 /* Return true if T represents a stmt that always transfers control. */
2494 is_ctrl_stmt (const_tree t
)
2496 return (TREE_CODE (t
) == COND_EXPR
2497 || TREE_CODE (t
) == SWITCH_EXPR
2498 || TREE_CODE (t
) == GOTO_EXPR
2499 || TREE_CODE (t
) == RETURN_EXPR
2500 || TREE_CODE (t
) == RESX_EXPR
);
2504 /* Return true if T is a statement that may alter the flow of control
2505 (e.g., a call to a non-returning function). */
2508 is_ctrl_altering_stmt (const_tree t
)
2513 call
= get_call_expr_in (CONST_CAST_TREE (t
));
2516 /* A non-pure/const CALL_EXPR alters flow control if the current
2517 function has nonlocal labels. */
2518 if (TREE_SIDE_EFFECTS (call
) && cfun
->has_nonlocal_label
)
2521 /* A CALL_EXPR also alters control flow if it does not return. */
2522 if (call_expr_flags (call
) & ECF_NORETURN
)
2526 /* OpenMP directives alter control flow. */
2527 if (OMP_DIRECTIVE_P (t
))
2530 /* If a statement can throw, it alters control flow. */
2531 return tree_can_throw_internal (t
);
2535 /* Return true if T is a computed goto. */
2538 computed_goto_p (const_tree t
)
2540 return (TREE_CODE (t
) == GOTO_EXPR
2541 && TREE_CODE (GOTO_DESTINATION (t
)) != LABEL_DECL
);
2545 /* Return true if T is a simple local goto. */
2548 simple_goto_p (const_tree t
)
2550 return (TREE_CODE (t
) == GOTO_EXPR
2551 && TREE_CODE (GOTO_DESTINATION (t
)) == LABEL_DECL
);
2555 /* Return true if T can make an abnormal transfer of control flow.
2556 Transfers of control flow associated with EH are excluded. */
2559 tree_can_make_abnormal_goto (const_tree t
)
2561 if (computed_goto_p (t
))
2563 if (TREE_CODE (t
) == GIMPLE_MODIFY_STMT
)
2564 t
= GIMPLE_STMT_OPERAND (t
, 1);
2565 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
2566 t
= TREE_OPERAND (t
, 0);
2567 if (TREE_CODE (t
) == CALL_EXPR
)
2568 return TREE_SIDE_EFFECTS (t
) && cfun
->has_nonlocal_label
;
2573 /* Return true if T should start a new basic block. PREV_T is the
2574 statement preceding T. It is used when T is a label or a case label.
2575 Labels should only start a new basic block if their previous statement
2576 wasn't a label. Otherwise, sequence of labels would generate
2577 unnecessary basic blocks that only contain a single label. */
2580 stmt_starts_bb_p (const_tree t
, const_tree prev_t
)
2585 /* LABEL_EXPRs start a new basic block only if the preceding
2586 statement wasn't a label of the same type. This prevents the
2587 creation of consecutive blocks that have nothing but a single
2589 if (TREE_CODE (t
) == LABEL_EXPR
)
2591 /* Nonlocal and computed GOTO targets always start a new block. */
2592 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t
))
2593 || FORCED_LABEL (LABEL_EXPR_LABEL (t
)))
2596 if (prev_t
&& TREE_CODE (prev_t
) == LABEL_EXPR
)
2598 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t
)))
2601 cfg_stats
.num_merged_labels
++;
2612 /* Return true if T should end a basic block. */
2615 stmt_ends_bb_p (const_tree t
)
2617 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2620 /* Remove block annotations and other datastructures. */
2623 delete_tree_cfg_annotations (void)
2626 block_stmt_iterator bsi
;
2628 /* Remove annotations from every tree in the function. */
2630 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
2632 tree stmt
= bsi_stmt (bsi
);
2633 ggc_free (stmt
->base
.ann
);
2634 stmt
->base
.ann
= NULL
;
2636 label_to_block_map
= NULL
;
2640 /* Return the first statement in basic block BB. */
2643 first_stmt (basic_block bb
)
2645 block_stmt_iterator i
= bsi_start (bb
);
2646 return !bsi_end_p (i
) ? bsi_stmt (i
) : NULL_TREE
;
2649 /* Return the last statement in basic block BB. */
2652 last_stmt (basic_block bb
)
2654 block_stmt_iterator b
= bsi_last (bb
);
2655 return !bsi_end_p (b
) ? bsi_stmt (b
) : NULL_TREE
;
2658 /* Return the last statement of an otherwise empty block. Return NULL
2659 if the block is totally empty, or if it contains more than one
2663 last_and_only_stmt (basic_block bb
)
2665 block_stmt_iterator i
= bsi_last (bb
);
2671 last
= bsi_stmt (i
);
2676 /* Empty statements should no longer appear in the instruction stream.
2677 Everything that might have appeared before should be deleted by
2678 remove_useless_stmts, and the optimizers should just bsi_remove
2679 instead of smashing with build_empty_stmt.
2681 Thus the only thing that should appear here in a block containing
2682 one executable statement is a label. */
2683 prev
= bsi_stmt (i
);
2684 if (TREE_CODE (prev
) == LABEL_EXPR
)
2691 /* Mark BB as the basic block holding statement T. */
2694 set_bb_for_stmt (tree t
, basic_block bb
)
2696 if (TREE_CODE (t
) == PHI_NODE
)
2698 else if (TREE_CODE (t
) == STATEMENT_LIST
)
2700 tree_stmt_iterator i
;
2701 for (i
= tsi_start (t
); !tsi_end_p (i
); tsi_next (&i
))
2702 set_bb_for_stmt (tsi_stmt (i
), bb
);
2706 stmt_ann_t ann
= get_stmt_ann (t
);
2709 /* If the statement is a label, add the label to block-to-labels map
2710 so that we can speed up edge creation for GOTO_EXPRs. */
2711 if (TREE_CODE (t
) == LABEL_EXPR
)
2715 t
= LABEL_EXPR_LABEL (t
);
2716 uid
= LABEL_DECL_UID (t
);
2719 unsigned old_len
= VEC_length (basic_block
, label_to_block_map
);
2720 LABEL_DECL_UID (t
) = uid
= cfun
->cfg
->last_label_uid
++;
2721 if (old_len
<= (unsigned) uid
)
2723 unsigned new_len
= 3 * uid
/ 2;
2725 VEC_safe_grow_cleared (basic_block
, gc
, label_to_block_map
,
2730 /* We're moving an existing label. Make sure that we've
2731 removed it from the old block. */
2733 || !VEC_index (basic_block
, label_to_block_map
, uid
));
2734 VEC_replace (basic_block
, label_to_block_map
, uid
, bb
);
2739 /* Faster version of set_bb_for_stmt that assume that statement is being moved
2740 from one basic block to another.
2741 For BB splitting we can run into quadratic case, so performance is quite
2742 important and knowing that the tables are big enough, change_bb_for_stmt
2743 can inline as leaf function. */
2745 change_bb_for_stmt (tree t
, basic_block bb
)
2747 get_stmt_ann (t
)->bb
= bb
;
2748 if (TREE_CODE (t
) == LABEL_EXPR
)
2749 VEC_replace (basic_block
, label_to_block_map
,
2750 LABEL_DECL_UID (LABEL_EXPR_LABEL (t
)), bb
);
2753 /* Finds iterator for STMT. */
2755 extern block_stmt_iterator
2756 bsi_for_stmt (tree stmt
)
2758 block_stmt_iterator bsi
;
2760 for (bsi
= bsi_start (bb_for_stmt (stmt
)); !bsi_end_p (bsi
); bsi_next (&bsi
))
2761 if (bsi_stmt (bsi
) == stmt
)
2767 /* Mark statement T as modified, and update it. */
2769 update_modified_stmts (tree t
)
2771 if (!ssa_operands_active ())
2773 if (TREE_CODE (t
) == STATEMENT_LIST
)
2775 tree_stmt_iterator i
;
2777 for (i
= tsi_start (t
); !tsi_end_p (i
); tsi_next (&i
))
2779 stmt
= tsi_stmt (i
);
2780 update_stmt_if_modified (stmt
);
2784 update_stmt_if_modified (t
);
2787 /* Insert statement (or statement list) T before the statement
2788 pointed-to by iterator I. M specifies how to update iterator I
2789 after insertion (see enum bsi_iterator_update). */
2792 bsi_insert_before (block_stmt_iterator
*i
, tree t
, enum bsi_iterator_update m
)
2794 set_bb_for_stmt (t
, i
->bb
);
2795 update_modified_stmts (t
);
2796 tsi_link_before (&i
->tsi
, t
, m
);
2800 /* Insert statement (or statement list) T after the statement
2801 pointed-to by iterator I. M specifies how to update iterator I
2802 after insertion (see enum bsi_iterator_update). */
2805 bsi_insert_after (block_stmt_iterator
*i
, tree t
, enum bsi_iterator_update m
)
2807 set_bb_for_stmt (t
, i
->bb
);
2808 update_modified_stmts (t
);
2809 tsi_link_after (&i
->tsi
, t
, m
);
2813 /* Remove the statement pointed to by iterator I. The iterator is updated
2814 to the next statement.
2816 When REMOVE_EH_INFO is true we remove the statement pointed to by
2817 iterator I from the EH tables. Otherwise we do not modify the EH
2820 Generally, REMOVE_EH_INFO should be true when the statement is going to
2821 be removed from the IL and not reinserted elsewhere. */
2824 bsi_remove (block_stmt_iterator
*i
, bool remove_eh_info
)
2826 tree t
= bsi_stmt (*i
);
2827 set_bb_for_stmt (t
, NULL
);
2828 delink_stmt_imm_use (t
);
2829 tsi_delink (&i
->tsi
);
2830 mark_stmt_modified (t
);
2833 remove_stmt_from_eh_region (t
);
2834 gimple_remove_stmt_histograms (cfun
, t
);
2839 /* Move the statement at FROM so it comes right after the statement at TO. */
2842 bsi_move_after (block_stmt_iterator
*from
, block_stmt_iterator
*to
)
2844 tree stmt
= bsi_stmt (*from
);
2845 bsi_remove (from
, false);
2846 /* We must have BSI_NEW_STMT here, as bsi_move_after is sometimes used to
2847 move statements to an empty block. */
2848 bsi_insert_after (to
, stmt
, BSI_NEW_STMT
);
2852 /* Move the statement at FROM so it comes right before the statement at TO. */
2855 bsi_move_before (block_stmt_iterator
*from
, block_stmt_iterator
*to
)
2857 tree stmt
= bsi_stmt (*from
);
2858 bsi_remove (from
, false);
2859 /* For consistency with bsi_move_after, it might be better to have
2860 BSI_NEW_STMT here; however, that breaks several places that expect
2861 that TO does not change. */
2862 bsi_insert_before (to
, stmt
, BSI_SAME_STMT
);
2866 /* Move the statement at FROM to the end of basic block BB. */
2869 bsi_move_to_bb_end (block_stmt_iterator
*from
, basic_block bb
)
2871 block_stmt_iterator last
= bsi_last (bb
);
2873 /* Have to check bsi_end_p because it could be an empty block. */
2874 if (!bsi_end_p (last
) && is_ctrl_stmt (bsi_stmt (last
)))
2875 bsi_move_before (from
, &last
);
2877 bsi_move_after (from
, &last
);
2881 /* Replace the contents of the statement pointed to by iterator BSI
2882 with STMT. If UPDATE_EH_INFO is true, the exception handling
2883 information of the original statement is moved to the new statement. */
2886 bsi_replace (const block_stmt_iterator
*bsi
, tree stmt
, bool update_eh_info
)
2889 tree orig_stmt
= bsi_stmt (*bsi
);
2891 if (stmt
== orig_stmt
)
2893 SET_EXPR_LOCUS (stmt
, EXPR_LOCUS (orig_stmt
));
2894 set_bb_for_stmt (stmt
, bsi
->bb
);
2896 /* Preserve EH region information from the original statement, if
2897 requested by the caller. */
2900 eh_region
= lookup_stmt_eh_region (orig_stmt
);
2903 remove_stmt_from_eh_region (orig_stmt
);
2904 add_stmt_to_eh_region (stmt
, eh_region
);
2908 gimple_duplicate_stmt_histograms (cfun
, stmt
, cfun
, orig_stmt
);
2909 gimple_remove_stmt_histograms (cfun
, orig_stmt
);
2910 delink_stmt_imm_use (orig_stmt
);
2911 *bsi_stmt_ptr (*bsi
) = stmt
;
2912 mark_stmt_modified (stmt
);
2913 update_modified_stmts (stmt
);
2917 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2918 is made to place the statement in an existing basic block, but
2919 sometimes that isn't possible. When it isn't possible, the edge is
2920 split and the statement is added to the new block.
2922 In all cases, the returned *BSI points to the correct location. The
2923 return value is true if insertion should be done after the location,
2924 or false if it should be done before the location. If new basic block
2925 has to be created, it is stored in *NEW_BB. */
2928 tree_find_edge_insert_loc (edge e
, block_stmt_iterator
*bsi
,
2929 basic_block
*new_bb
)
2931 basic_block dest
, src
;
2937 /* If the destination has one predecessor which has no PHI nodes,
2938 insert there. Except for the exit block.
2940 The requirement for no PHI nodes could be relaxed. Basically we
2941 would have to examine the PHIs to prove that none of them used
2942 the value set by the statement we want to insert on E. That
2943 hardly seems worth the effort. */
2944 if (single_pred_p (dest
)
2945 && ! phi_nodes (dest
)
2946 && dest
!= EXIT_BLOCK_PTR
)
2948 *bsi
= bsi_start (dest
);
2949 if (bsi_end_p (*bsi
))
2952 /* Make sure we insert after any leading labels. */
2953 tmp
= bsi_stmt (*bsi
);
2954 while (TREE_CODE (tmp
) == LABEL_EXPR
)
2957 if (bsi_end_p (*bsi
))
2959 tmp
= bsi_stmt (*bsi
);
2962 if (bsi_end_p (*bsi
))
2964 *bsi
= bsi_last (dest
);
2971 /* If the source has one successor, the edge is not abnormal and
2972 the last statement does not end a basic block, insert there.
2973 Except for the entry block. */
2975 if ((e
->flags
& EDGE_ABNORMAL
) == 0
2976 && single_succ_p (src
)
2977 && src
!= ENTRY_BLOCK_PTR
)
2979 *bsi
= bsi_last (src
);
2980 if (bsi_end_p (*bsi
))
2983 tmp
= bsi_stmt (*bsi
);
2984 if (!stmt_ends_bb_p (tmp
))
2987 /* Insert code just before returning the value. We may need to decompose
2988 the return in the case it contains non-trivial operand. */
2989 if (TREE_CODE (tmp
) == RETURN_EXPR
)
2991 tree op
= TREE_OPERAND (tmp
, 0);
2992 if (op
&& !is_gimple_val (op
))
2994 gcc_assert (TREE_CODE (op
) == GIMPLE_MODIFY_STMT
);
2995 bsi_insert_before (bsi
, op
, BSI_NEW_STMT
);
2996 TREE_OPERAND (tmp
, 0) = GIMPLE_STMT_OPERAND (op
, 0);
3003 /* Otherwise, create a new basic block, and split this edge. */
3004 dest
= split_edge (e
);
3007 e
= single_pred_edge (dest
);
3012 /* This routine will commit all pending edge insertions, creating any new
3013 basic blocks which are necessary. */
3016 bsi_commit_edge_inserts (void)
3022 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR
), NULL
);
3025 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3026 bsi_commit_one_edge_insert (e
, NULL
);
3030 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3031 to this block, otherwise set it to NULL. */
3034 bsi_commit_one_edge_insert (edge e
, basic_block
*new_bb
)
3038 if (PENDING_STMT (e
))
3040 block_stmt_iterator bsi
;
3041 tree stmt
= PENDING_STMT (e
);
3043 PENDING_STMT (e
) = NULL_TREE
;
3045 if (tree_find_edge_insert_loc (e
, &bsi
, new_bb
))
3046 bsi_insert_after (&bsi
, stmt
, BSI_NEW_STMT
);
3048 bsi_insert_before (&bsi
, stmt
, BSI_NEW_STMT
);
3053 /* Add STMT to the pending list of edge E. No actual insertion is
3054 made until a call to bsi_commit_edge_inserts () is made. */
3057 bsi_insert_on_edge (edge e
, tree stmt
)
3059 append_to_statement_list (stmt
, &PENDING_STMT (e
));
3062 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3063 block has to be created, it is returned. */
3066 bsi_insert_on_edge_immediate (edge e
, tree stmt
)
3068 block_stmt_iterator bsi
;
3069 basic_block new_bb
= NULL
;
3071 gcc_assert (!PENDING_STMT (e
));
3073 if (tree_find_edge_insert_loc (e
, &bsi
, &new_bb
))
3074 bsi_insert_after (&bsi
, stmt
, BSI_NEW_STMT
);
3076 bsi_insert_before (&bsi
, stmt
, BSI_NEW_STMT
);
3081 /*---------------------------------------------------------------------------
3082 Tree specific functions for CFG manipulation
3083 ---------------------------------------------------------------------------*/
3085 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3088 reinstall_phi_args (edge new_edge
, edge old_edge
)
3091 edge_var_map_vector v
;
3095 v
= redirect_edge_var_map_vector (old_edge
);
3099 for (i
= 0, phi
= phi_nodes (new_edge
->dest
);
3100 VEC_iterate (edge_var_map
, v
, i
, vm
) && phi
;
3101 i
++, phi
= PHI_CHAIN (phi
))
3103 tree result
= redirect_edge_var_map_result (vm
);
3104 tree arg
= redirect_edge_var_map_def (vm
);
3106 gcc_assert (result
== PHI_RESULT (phi
));
3108 add_phi_arg (phi
, arg
, new_edge
);
3111 redirect_edge_var_map_clear (old_edge
);
3114 /* Returns the basic block after which the new basic block created
3115 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3116 near its "logical" location. This is of most help to humans looking
3117 at debugging dumps. */
3120 split_edge_bb_loc (edge edge_in
)
3122 basic_block dest
= edge_in
->dest
;
3124 if (dest
->prev_bb
&& find_edge (dest
->prev_bb
, dest
))
3125 return edge_in
->src
;
3127 return dest
->prev_bb
;
3130 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3131 Abort on abnormal edges. */
3134 tree_split_edge (edge edge_in
)
3136 basic_block new_bb
, after_bb
, dest
;
3139 /* Abnormal edges cannot be split. */
3140 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
3142 dest
= edge_in
->dest
;
3144 after_bb
= split_edge_bb_loc (edge_in
);
3146 new_bb
= create_empty_bb (after_bb
);
3147 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
3148 new_bb
->count
= edge_in
->count
;
3149 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
3150 new_edge
->probability
= REG_BR_PROB_BASE
;
3151 new_edge
->count
= edge_in
->count
;
3153 e
= redirect_edge_and_branch (edge_in
, new_bb
);
3154 gcc_assert (e
== edge_in
);
3155 reinstall_phi_args (new_edge
, e
);
3160 /* Callback for walk_tree, check that all elements with address taken are
3161 properly noticed as such. The DATA is an int* that is 1 if TP was seen
3162 inside a PHI node. */
3165 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
3172 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3173 #define CHECK_OP(N, MSG) \
3174 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3175 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3177 switch (TREE_CODE (t
))
3180 if (SSA_NAME_IN_FREE_LIST (t
))
3182 error ("SSA name in freelist but still referenced");
3188 x
= fold (ASSERT_EXPR_COND (t
));
3189 if (x
== boolean_false_node
)
3191 error ("ASSERT_EXPR with an always-false condition");
3199 case GIMPLE_MODIFY_STMT
:
3200 x
= GIMPLE_STMT_OPERAND (t
, 0);
3201 if (TREE_CODE (x
) == BIT_FIELD_REF
3202 && is_gimple_reg (TREE_OPERAND (x
, 0)))
3204 error ("GIMPLE register modified with BIT_FIELD_REF");
3212 bool old_side_effects
;
3214 bool new_side_effects
;
3216 gcc_assert (is_gimple_address (t
));
3218 old_constant
= TREE_CONSTANT (t
);
3219 old_side_effects
= TREE_SIDE_EFFECTS (t
);
3221 recompute_tree_invariant_for_addr_expr (t
);
3222 new_side_effects
= TREE_SIDE_EFFECTS (t
);
3223 new_constant
= TREE_CONSTANT (t
);
3225 if (old_constant
!= new_constant
)
3227 error ("constant not recomputed when ADDR_EXPR changed");
3230 if (old_side_effects
!= new_side_effects
)
3232 error ("side effects not recomputed when ADDR_EXPR changed");
3236 /* Skip any references (they will be checked when we recurse down the
3237 tree) and ensure that any variable used as a prefix is marked
3239 for (x
= TREE_OPERAND (t
, 0);
3240 handled_component_p (x
);
3241 x
= TREE_OPERAND (x
, 0))
3244 if (TREE_CODE (x
) != VAR_DECL
&& TREE_CODE (x
) != PARM_DECL
)
3246 if (!TREE_ADDRESSABLE (x
))
3248 error ("address taken, but ADDRESSABLE bit not set");
3256 x
= COND_EXPR_COND (t
);
3257 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
3259 error ("non-integral used in condition");
3262 if (!is_gimple_condexpr (x
))
3264 error ("invalid conditional operand");
3269 case NON_LVALUE_EXPR
:
3273 case FIX_TRUNC_EXPR
:
3278 case TRUTH_NOT_EXPR
:
3279 CHECK_OP (0, "invalid operand to unary operator");
3286 case ARRAY_RANGE_REF
:
3288 case VIEW_CONVERT_EXPR
:
3289 /* We have a nest of references. Verify that each of the operands
3290 that determine where to reference is either a constant or a variable,
3291 verify that the base is valid, and then show we've already checked
3293 while (handled_component_p (t
))
3295 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
3296 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3297 else if (TREE_CODE (t
) == ARRAY_REF
3298 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
3300 CHECK_OP (1, "invalid array index");
3301 if (TREE_OPERAND (t
, 2))
3302 CHECK_OP (2, "invalid array lower bound");
3303 if (TREE_OPERAND (t
, 3))
3304 CHECK_OP (3, "invalid array stride");
3306 else if (TREE_CODE (t
) == BIT_FIELD_REF
)
3308 if (!host_integerp (TREE_OPERAND (t
, 1), 1)
3309 || !host_integerp (TREE_OPERAND (t
, 2), 1))
3311 error ("invalid position or size operand to BIT_FIELD_REF");
3314 else if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
3315 && (TYPE_PRECISION (TREE_TYPE (t
))
3316 != TREE_INT_CST_LOW (TREE_OPERAND (t
, 1))))
3318 error ("integral result type precision does not match "
3319 "field size of BIT_FIELD_REF");
3322 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
3323 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t
)))
3324 != TREE_INT_CST_LOW (TREE_OPERAND (t
, 1))))
3326 error ("mode precision of non-integral result does not "
3327 "match field size of BIT_FIELD_REF");
3332 t
= TREE_OPERAND (t
, 0);
3335 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
3337 error ("invalid reference prefix");
3344 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3345 POINTER_PLUS_EXPR. */
3346 if (POINTER_TYPE_P (TREE_TYPE (t
)))
3348 error ("invalid operand to plus/minus, type is a pointer");
3351 CHECK_OP (0, "invalid operand to binary operator");
3352 CHECK_OP (1, "invalid operand to binary operator");
3355 case POINTER_PLUS_EXPR
:
3356 /* Check to make sure the first operand is a pointer or reference type. */
3357 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
3359 error ("invalid operand to pointer plus, first operand is not a pointer");
3362 /* Check to make sure the second operand is an integer with type of
3364 if (!useless_type_conversion_p (sizetype
,
3365 TREE_TYPE (TREE_OPERAND (t
, 1))))
3367 error ("invalid operand to pointer plus, second operand is not an "
3368 "integer with type of sizetype.");
3378 case UNORDERED_EXPR
:
3387 case TRUNC_DIV_EXPR
:
3389 case FLOOR_DIV_EXPR
:
3390 case ROUND_DIV_EXPR
:
3391 case TRUNC_MOD_EXPR
:
3393 case FLOOR_MOD_EXPR
:
3394 case ROUND_MOD_EXPR
:
3396 case EXACT_DIV_EXPR
:
3406 CHECK_OP (0, "invalid operand to binary operator");
3407 CHECK_OP (1, "invalid operand to binary operator");
3411 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
3423 /* Verifies if EXPR is a valid GIMPLE unary expression. Returns true
3424 if there is an error, otherwise false. */
3427 verify_gimple_unary_expr (const_tree expr
)
3429 tree op
= TREE_OPERAND (expr
, 0);
3430 tree type
= TREE_TYPE (expr
);
3432 if (!is_gimple_val (op
))
3434 error ("invalid operand in unary expression");
3438 /* For general unary expressions we have the operations type
3439 as the effective type the operation is carried out on. So all
3440 we need to require is that the operand is trivially convertible
3442 if (!useless_type_conversion_p (type
, TREE_TYPE (op
)))
3444 error ("type mismatch in unary expression");
3445 debug_generic_expr (type
);
3446 debug_generic_expr (TREE_TYPE (op
));
3453 /* Verifies if EXPR is a valid GIMPLE binary expression. Returns true
3454 if there is an error, otherwise false. */
3457 verify_gimple_binary_expr (const_tree expr
)
3459 tree op0
= TREE_OPERAND (expr
, 0);
3460 tree op1
= TREE_OPERAND (expr
, 1);
3461 tree type
= TREE_TYPE (expr
);
3463 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3465 error ("invalid operands in binary expression");
3469 /* For general binary expressions we have the operations type
3470 as the effective type the operation is carried out on. So all
3471 we need to require is that both operands are trivially convertible
3473 if (!useless_type_conversion_p (type
, TREE_TYPE (op0
))
3474 || !useless_type_conversion_p (type
, TREE_TYPE (op1
)))
3476 error ("type mismatch in binary expression");
3477 debug_generic_stmt (type
);
3478 debug_generic_stmt (TREE_TYPE (op0
));
3479 debug_generic_stmt (TREE_TYPE (op1
));
3486 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3487 Returns true if there is an error, otherwise false. */
3490 verify_gimple_min_lval (tree expr
)
3494 if (is_gimple_id (expr
))
3497 if (TREE_CODE (expr
) != INDIRECT_REF
3498 && TREE_CODE (expr
) != ALIGN_INDIRECT_REF
3499 && TREE_CODE (expr
) != MISALIGNED_INDIRECT_REF
)
3501 error ("invalid expression for min lvalue");
3505 op
= TREE_OPERAND (expr
, 0);
3506 if (!is_gimple_val (op
))
3508 error ("invalid operand in indirect reference");
3509 debug_generic_stmt (op
);
3512 if (!useless_type_conversion_p (TREE_TYPE (expr
),
3513 TREE_TYPE (TREE_TYPE (op
))))
3515 error ("type mismatch in indirect reference");
3516 debug_generic_stmt (TREE_TYPE (expr
));
3517 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3524 /* Verify if EXPR is a valid GIMPLE reference expression. Returns true
3525 if there is an error, otherwise false. */
3528 verify_gimple_reference (tree expr
)
3530 while (handled_component_p (expr
))
3532 tree op
= TREE_OPERAND (expr
, 0);
3534 if (TREE_CODE (expr
) == ARRAY_REF
3535 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
3537 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3538 || (TREE_OPERAND (expr
, 2)
3539 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3540 || (TREE_OPERAND (expr
, 3)
3541 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3543 error ("invalid operands to array reference");
3544 debug_generic_stmt (expr
);
3549 /* Verify if the reference array element types are compatible. */
3550 if (TREE_CODE (expr
) == ARRAY_REF
3551 && !useless_type_conversion_p (TREE_TYPE (expr
),
3552 TREE_TYPE (TREE_TYPE (op
))))
3554 error ("type mismatch in array reference");
3555 debug_generic_stmt (TREE_TYPE (expr
));
3556 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3559 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3560 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3561 TREE_TYPE (TREE_TYPE (op
))))
3563 error ("type mismatch in array range reference");
3564 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3565 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3569 if ((TREE_CODE (expr
) == REALPART_EXPR
3570 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3571 && !useless_type_conversion_p (TREE_TYPE (expr
),
3572 TREE_TYPE (TREE_TYPE (op
))))
3574 error ("type mismatch in real/imagpart reference");
3575 debug_generic_stmt (TREE_TYPE (expr
));
3576 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3580 if (TREE_CODE (expr
) == COMPONENT_REF
3581 && !useless_type_conversion_p (TREE_TYPE (expr
),
3582 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3584 error ("type mismatch in component reference");
3585 debug_generic_stmt (TREE_TYPE (expr
));
3586 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3590 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3591 is nothing to verify. Gross mismatches at most invoke
3592 undefined behavior. */
3597 return verify_gimple_min_lval (expr
);
3600 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3601 list of pointer-to types that is trivially convertible to DEST. */
3604 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3608 if (!TYPE_POINTER_TO (src_obj
))
3611 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3612 if (useless_type_conversion_p (dest
, src
))
3618 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3619 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3622 valid_fixed_convert_types_p (tree type1
, tree type2
)
3624 return (FIXED_POINT_TYPE_P (type1
)
3625 && (INTEGRAL_TYPE_P (type2
)
3626 || SCALAR_FLOAT_TYPE_P (type2
)
3627 || FIXED_POINT_TYPE_P (type2
)));
3630 /* Verify the GIMPLE expression EXPR. Returns true if there is an
3631 error, otherwise false. */
3634 verify_gimple_expr (tree expr
)
3636 tree type
= TREE_TYPE (expr
);
3638 if (is_gimple_val (expr
))
3641 /* Special codes we cannot handle via their class. */
3642 switch (TREE_CODE (expr
))
3646 tree op
= TREE_OPERAND (expr
, 0);
3647 if (!is_gimple_val (op
))
3649 error ("invalid operand in conversion");
3653 /* Allow conversions between integral types and between
3655 if ((INTEGRAL_TYPE_P (type
) && INTEGRAL_TYPE_P (TREE_TYPE (op
)))
3656 || (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE (op
))))
3659 /* Allow conversions between integral types and pointers only if
3660 there is no sign or zero extension involved. */
3661 if (((POINTER_TYPE_P (type
) && INTEGRAL_TYPE_P (TREE_TYPE (op
)))
3662 || (POINTER_TYPE_P (TREE_TYPE (op
)) && INTEGRAL_TYPE_P (type
)))
3663 && (TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op
))
3664 /* For targets were the precision of sizetype doesn't
3665 match that of pointers we need the following. */
3666 || type
== sizetype
|| TREE_TYPE (op
) == sizetype
))
3669 /* Allow conversion from integer to offset type and vice versa. */
3670 if ((TREE_CODE (type
) == OFFSET_TYPE
3671 && TREE_CODE (TREE_TYPE (op
)) == INTEGER_TYPE
)
3672 || (TREE_CODE (type
) == INTEGER_TYPE
3673 && TREE_CODE (TREE_TYPE (op
)) == OFFSET_TYPE
))
3676 /* Otherwise assert we are converting between types of the
3678 if (TREE_CODE (type
) != TREE_CODE (TREE_TYPE (op
)))
3680 error ("invalid types in nop conversion");
3681 debug_generic_expr (type
);
3682 debug_generic_expr (TREE_TYPE (op
));
3689 case FIXED_CONVERT_EXPR
:
3691 tree op
= TREE_OPERAND (expr
, 0);
3692 if (!is_gimple_val (op
))
3694 error ("invalid operand in conversion");
3698 if (!valid_fixed_convert_types_p (type
, TREE_TYPE (op
))
3699 && !valid_fixed_convert_types_p (TREE_TYPE (op
), type
))
3701 error ("invalid types in fixed-point conversion");
3702 debug_generic_expr (type
);
3703 debug_generic_expr (TREE_TYPE (op
));
3712 tree op
= TREE_OPERAND (expr
, 0);
3713 if (!is_gimple_val (op
))
3715 error ("invalid operand in int to float conversion");
3718 if (!INTEGRAL_TYPE_P (TREE_TYPE (op
))
3719 || !SCALAR_FLOAT_TYPE_P (type
))
3721 error ("invalid types in conversion to floating point");
3722 debug_generic_expr (type
);
3723 debug_generic_expr (TREE_TYPE (op
));
3729 case FIX_TRUNC_EXPR
:
3731 tree op
= TREE_OPERAND (expr
, 0);
3732 if (!is_gimple_val (op
))
3734 error ("invalid operand in float to int conversion");
3737 if (!INTEGRAL_TYPE_P (type
)
3738 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (op
)))
3740 error ("invalid types in conversion to integer");
3741 debug_generic_expr (type
);
3742 debug_generic_expr (TREE_TYPE (op
));
3750 tree op0
= TREE_OPERAND (expr
, 0);
3751 tree op1
= TREE_OPERAND (expr
, 1);
3752 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3754 error ("invalid operands in complex expression");
3757 if (!TREE_CODE (type
) == COMPLEX_TYPE
3758 || !(TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
3759 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op0
)))
3760 || !(TREE_CODE (TREE_TYPE (op1
)) == INTEGER_TYPE
3761 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op1
)))
3762 || !useless_type_conversion_p (TREE_TYPE (type
),
3764 || !useless_type_conversion_p (TREE_TYPE (type
),
3767 error ("type mismatch in complex expression");
3768 debug_generic_stmt (TREE_TYPE (expr
));
3769 debug_generic_stmt (TREE_TYPE (op0
));
3770 debug_generic_stmt (TREE_TYPE (op1
));
3778 /* This is used like COMPLEX_EXPR but for vectors. */
3779 if (TREE_CODE (type
) != VECTOR_TYPE
)
3781 error ("constructor not allowed for non-vector types");
3782 debug_generic_stmt (type
);
3785 /* FIXME: verify constructor arguments. */
3794 tree op0
= TREE_OPERAND (expr
, 0);
3795 tree op1
= TREE_OPERAND (expr
, 1);
3796 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3798 error ("invalid operands in shift expression");
3801 if (!TREE_CODE (TREE_TYPE (op1
)) == INTEGER_TYPE
3802 || !useless_type_conversion_p (type
, TREE_TYPE (op0
)))
3804 error ("type mismatch in shift expression");
3805 debug_generic_stmt (TREE_TYPE (expr
));
3806 debug_generic_stmt (TREE_TYPE (op0
));
3807 debug_generic_stmt (TREE_TYPE (op1
));
3816 tree op0
= TREE_OPERAND (expr
, 0);
3817 tree op1
= TREE_OPERAND (expr
, 1);
3818 if (POINTER_TYPE_P (type
)
3819 || POINTER_TYPE_P (TREE_TYPE (op0
))
3820 || POINTER_TYPE_P (TREE_TYPE (op1
)))
3822 error ("invalid (pointer) operands to plus/minus");
3825 /* Continue with generic binary expression handling. */
3829 case POINTER_PLUS_EXPR
:
3831 tree op0
= TREE_OPERAND (expr
, 0);
3832 tree op1
= TREE_OPERAND (expr
, 1);
3833 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3835 error ("invalid operands in pointer plus expression");
3838 if (!POINTER_TYPE_P (TREE_TYPE (op0
))
3839 || !useless_type_conversion_p (type
, TREE_TYPE (op0
))
3840 || !useless_type_conversion_p (sizetype
, TREE_TYPE (op1
)))
3842 error ("type mismatch in pointer plus expression");
3843 debug_generic_stmt (type
);
3844 debug_generic_stmt (TREE_TYPE (op0
));
3845 debug_generic_stmt (TREE_TYPE (op1
));
3853 tree op0
= TREE_OPERAND (expr
, 0);
3854 tree op1
= TREE_OPERAND (expr
, 1);
3855 tree op2
= TREE_OPERAND (expr
, 2);
3856 if ((!is_gimple_val (op1
)
3857 && TREE_CODE (TREE_TYPE (op1
)) != VOID_TYPE
)
3858 || (!is_gimple_val (op2
)
3859 && TREE_CODE (TREE_TYPE (op2
)) != VOID_TYPE
))
3861 error ("invalid operands in conditional expression");
3864 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0
))
3865 || (TREE_CODE (TREE_TYPE (op1
)) != VOID_TYPE
3866 && !useless_type_conversion_p (type
, TREE_TYPE (op1
)))
3867 || (TREE_CODE (TREE_TYPE (op2
)) != VOID_TYPE
3868 && !useless_type_conversion_p (type
, TREE_TYPE (op2
))))
3870 error ("type mismatch in conditional expression");
3871 debug_generic_stmt (type
);
3872 debug_generic_stmt (TREE_TYPE (op0
));
3873 debug_generic_stmt (TREE_TYPE (op1
));
3874 debug_generic_stmt (TREE_TYPE (op2
));
3877 return verify_gimple_expr (op0
);
3882 tree op
= TREE_OPERAND (expr
, 0);
3883 if (!is_gimple_addressable (op
))
3885 error ("invalid operand in unary expression");
3888 if (!one_pointer_to_useless_type_conversion_p (type
, TREE_TYPE (op
))
3889 /* FIXME: a longstanding wart, &a == &a[0]. */
3890 && (TREE_CODE (TREE_TYPE (op
)) != ARRAY_TYPE
3891 || !one_pointer_to_useless_type_conversion_p (type
,
3892 TREE_TYPE (TREE_TYPE (op
)))))
3894 error ("type mismatch in address expression");
3895 debug_generic_stmt (TREE_TYPE (expr
));
3896 debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op
)));
3900 return verify_gimple_reference (op
);
3903 case TRUTH_ANDIF_EXPR
:
3904 case TRUTH_ORIF_EXPR
:
3907 case TRUTH_AND_EXPR
:
3909 case TRUTH_XOR_EXPR
:
3911 tree op0
= TREE_OPERAND (expr
, 0);
3912 tree op1
= TREE_OPERAND (expr
, 1);
3914 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3916 error ("invalid operands in truth expression");
3920 /* We allow any kind of integral typed argument and result. */
3921 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0
))
3922 || !INTEGRAL_TYPE_P (TREE_TYPE (op1
))
3923 || !INTEGRAL_TYPE_P (type
))
3925 error ("type mismatch in binary truth expression");
3926 debug_generic_stmt (type
);
3927 debug_generic_stmt (TREE_TYPE (op0
));
3928 debug_generic_stmt (TREE_TYPE (op1
));
3935 case TRUTH_NOT_EXPR
:
3937 tree op
= TREE_OPERAND (expr
, 0);
3939 if (!is_gimple_val (op
))
3941 error ("invalid operand in unary not");
3945 /* For TRUTH_NOT_EXPR we can have any kind of integral
3946 typed arguments and results. */
3947 if (!INTEGRAL_TYPE_P (TREE_TYPE (op
))
3948 || !INTEGRAL_TYPE_P (type
))
3950 error ("type mismatch in not expression");
3951 debug_generic_expr (TREE_TYPE (expr
));
3952 debug_generic_expr (TREE_TYPE (op
));
3960 /* FIXME. The C frontend passes unpromoted arguments in case it
3961 didn't see a function declaration before the call. */
3963 tree decl
= CALL_EXPR_FN (expr
);
3965 if (TREE_CODE (decl
) == FUNCTION_DECL
3966 && DECL_LOOPING_CONST_OR_PURE_P (decl
)
3967 && (!DECL_PURE_P (decl
))
3968 && (!TREE_READONLY (decl
)))
3970 error ("invalid pure const state for function");
3983 /* Generic handling via classes. */
3984 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
3987 return verify_gimple_unary_expr (expr
);
3990 return verify_gimple_binary_expr (expr
);
3993 return verify_gimple_reference (expr
);
3995 case tcc_comparison
:
3997 tree op0
= TREE_OPERAND (expr
, 0);
3998 tree op1
= TREE_OPERAND (expr
, 1);
3999 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
4001 error ("invalid operands in comparison expression");
4004 /* For comparisons we do not have the operations type as the
4005 effective type the comparison is carried out in. Instead
4006 we require that either the first operand is trivially
4007 convertible into the second, or the other way around.
4008 The resulting type of a comparison may be any integral type.
4009 Because we special-case pointers to void we allow
4010 comparisons of pointers with the same mode as well. */
4011 if ((!useless_type_conversion_p (TREE_TYPE (op0
), TREE_TYPE (op1
))
4012 && !useless_type_conversion_p (TREE_TYPE (op1
), TREE_TYPE (op0
))
4013 && (!POINTER_TYPE_P (TREE_TYPE (op0
))
4014 || !POINTER_TYPE_P (TREE_TYPE (op1
))
4015 || TYPE_MODE (TREE_TYPE (op0
)) != TYPE_MODE (TREE_TYPE (op1
))))
4016 || !INTEGRAL_TYPE_P (type
))
4018 error ("type mismatch in comparison expression");
4019 debug_generic_stmt (TREE_TYPE (expr
));
4020 debug_generic_stmt (TREE_TYPE (op0
));
4021 debug_generic_stmt (TREE_TYPE (op1
));
4034 /* Verify the GIMPLE assignment statement STMT. Returns true if there
4035 is an error, otherwise false. */
4038 verify_gimple_modify_stmt (const_tree stmt
)
4040 tree lhs
= GIMPLE_STMT_OPERAND (stmt
, 0);
4041 tree rhs
= GIMPLE_STMT_OPERAND (stmt
, 1);
4043 gcc_assert (TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
);
4045 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
4048 error ("non-trivial conversion at assignment");
4049 debug_generic_expr (TREE_TYPE (lhs
));
4050 debug_generic_expr (TREE_TYPE (rhs
));
4054 /* Loads/stores from/to a variable are ok. */
4055 if ((is_gimple_val (lhs
)
4056 && is_gimple_variable (rhs
))
4057 || (is_gimple_val (rhs
)
4058 && is_gimple_variable (lhs
)))
4061 /* Aggregate copies are ok. */
4062 if (!is_gimple_reg_type (TREE_TYPE (lhs
))
4063 && !is_gimple_reg_type (TREE_TYPE (rhs
)))
4066 /* We might get 'loads' from a parameter which is not a gimple value. */
4067 if (TREE_CODE (rhs
) == PARM_DECL
)
4068 return verify_gimple_expr (lhs
);
4070 if (!is_gimple_variable (lhs
)
4071 && verify_gimple_expr (lhs
))
4074 if (!is_gimple_variable (rhs
)
4075 && verify_gimple_expr (rhs
))
4081 /* Verify the GIMPLE statement STMT. Returns true if there is an
4082 error, otherwise false. */
4085 verify_gimple_stmt (tree stmt
)
4087 if (!is_gimple_stmt (stmt
))
4089 error ("is not a valid GIMPLE statement");
4093 if (OMP_DIRECTIVE_P (stmt
))
4095 /* OpenMP directives are validated by the FE and never operated
4096 on by the optimizers. Furthermore, OMP_FOR may contain
4097 non-gimple expressions when the main index variable has had
4098 its address taken. This does not affect the loop itself
4099 because the header of an OMP_FOR is merely used to determine
4100 how to setup the parallel iteration. */
4104 switch (TREE_CODE (stmt
))
4106 case GIMPLE_MODIFY_STMT
:
4107 return verify_gimple_modify_stmt (stmt
);
4114 if (!is_gimple_val (TREE_OPERAND (stmt
, 0)))
4116 error ("invalid operand to switch statement");
4117 debug_generic_expr (TREE_OPERAND (stmt
, 0));
4123 tree op
= TREE_OPERAND (stmt
, 0);
4125 if (TREE_CODE (TREE_TYPE (stmt
)) != VOID_TYPE
)
4127 error ("type error in return expression");
4132 || TREE_CODE (op
) == RESULT_DECL
)
4135 return verify_gimple_modify_stmt (op
);
4140 return verify_gimple_expr (stmt
);
4143 case CHANGE_DYNAMIC_TYPE_EXPR
:
4153 /* Verify the GIMPLE statements inside the statement list STMTS.
4154 Returns true if there were any errors. */
4157 verify_gimple_2 (tree stmts
)
4159 tree_stmt_iterator tsi
;
4162 for (tsi
= tsi_start (stmts
); !tsi_end_p (tsi
); tsi_next (&tsi
))
4164 tree stmt
= tsi_stmt (tsi
);
4166 switch (TREE_CODE (stmt
))
4169 err
|= verify_gimple_2 (BIND_EXPR_BODY (stmt
));
4172 case TRY_CATCH_EXPR
:
4173 case TRY_FINALLY_EXPR
:
4174 err
|= verify_gimple_2 (TREE_OPERAND (stmt
, 0));
4175 err
|= verify_gimple_2 (TREE_OPERAND (stmt
, 1));
4179 err
|= verify_gimple_2 (CATCH_BODY (stmt
));
4182 case EH_FILTER_EXPR
:
4183 err
|= verify_gimple_2 (EH_FILTER_FAILURE (stmt
));
4188 bool err2
= verify_gimple_stmt (stmt
);
4190 debug_generic_expr (stmt
);
4200 /* Verify the GIMPLE statements inside the statement list STMTS. */
4203 verify_gimple_1 (tree stmts
)
4205 if (verify_gimple_2 (stmts
))
4206 internal_error ("verify_gimple failed");
4209 /* Verify the GIMPLE statements inside the current function. */
4212 verify_gimple (void)
4214 verify_gimple_1 (BIND_EXPR_BODY (DECL_SAVED_TREE (cfun
->decl
)));
4217 /* Verify STMT, return true if STMT is not in GIMPLE form.
4218 TODO: Implement type checking. */
4221 verify_stmt (tree stmt
, bool last_in_block
)
4225 if (OMP_DIRECTIVE_P (stmt
))
4227 /* OpenMP directives are validated by the FE and never operated
4228 on by the optimizers. Furthermore, OMP_FOR may contain
4229 non-gimple expressions when the main index variable has had
4230 its address taken. This does not affect the loop itself
4231 because the header of an OMP_FOR is merely used to determine
4232 how to setup the parallel iteration. */
4236 if (!is_gimple_stmt (stmt
))
4238 error ("is not a valid GIMPLE statement");
4242 addr
= walk_tree (&stmt
, verify_expr
, NULL
, NULL
);
4245 debug_generic_stmt (addr
);
4248 inform ("in statement");
4249 debug_generic_stmt (stmt
);
4254 /* If the statement is marked as part of an EH region, then it is
4255 expected that the statement could throw. Verify that when we
4256 have optimizations that simplify statements such that we prove
4257 that they cannot throw, that we update other data structures
4259 if (lookup_stmt_eh_region (stmt
) >= 0)
4261 if (!tree_could_throw_p (stmt
))
4263 error ("statement marked for throw, but doesn%'t");
4266 if (!last_in_block
&& tree_can_throw_internal (stmt
))
4268 error ("statement marked for throw in middle of block");
4276 debug_generic_stmt (stmt
);
4281 /* Return true when the T can be shared. */
4284 tree_node_can_be_shared (tree t
)
4286 if (IS_TYPE_OR_DECL_P (t
)
4287 || is_gimple_min_invariant (t
)
4288 || TREE_CODE (t
) == SSA_NAME
4289 || t
== error_mark_node
4290 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4293 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
4296 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
4297 && is_gimple_min_invariant (TREE_OPERAND (t
, 1)))
4298 || TREE_CODE (t
) == COMPONENT_REF
4299 || TREE_CODE (t
) == REALPART_EXPR
4300 || TREE_CODE (t
) == IMAGPART_EXPR
)
4301 t
= TREE_OPERAND (t
, 0);
4310 /* Called via walk_trees. Verify tree sharing. */
4313 verify_node_sharing (tree
* tp
, int *walk_subtrees
, void *data
)
4315 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4317 if (tree_node_can_be_shared (*tp
))
4319 *walk_subtrees
= false;
4323 if (pointer_set_insert (visited
, *tp
))
4330 /* Helper function for verify_gimple_tuples. */
4333 verify_gimple_tuples_1 (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
4334 void *data ATTRIBUTE_UNUSED
)
4336 switch (TREE_CODE (*tp
))
4339 error ("unexpected non-tuple");
4349 /* Verify that there are no trees that should have been converted to
4350 gimple tuples. Return true if T contains a node that should have
4351 been converted to a gimple tuple, but hasn't. */
4354 verify_gimple_tuples (tree t
)
4356 return walk_tree (&t
, verify_gimple_tuples_1
, NULL
, NULL
) != NULL
;
4359 static bool eh_error_found
;
4361 verify_eh_throw_stmt_node (void **slot
, void *data
)
4363 struct throw_stmt_node
*node
= (struct throw_stmt_node
*)*slot
;
4364 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4366 if (!pointer_set_contains (visited
, node
->stmt
))
4368 error ("Dead STMT in EH table");
4369 debug_generic_stmt (node
->stmt
);
4370 eh_error_found
= true;
4375 /* Verify the GIMPLE statement chain. */
4381 block_stmt_iterator bsi
;
4383 struct pointer_set_t
*visited
, *visited_stmts
;
4386 timevar_push (TV_TREE_STMT_VERIFY
);
4387 visited
= pointer_set_create ();
4388 visited_stmts
= pointer_set_create ();
4395 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
4397 int phi_num_args
= PHI_NUM_ARGS (phi
);
4399 pointer_set_insert (visited_stmts
, phi
);
4400 if (bb_for_stmt (phi
) != bb
)
4402 error ("bb_for_stmt (phi) is set to a wrong basic block");
4406 for (i
= 0; i
< phi_num_args
; i
++)
4408 tree t
= PHI_ARG_DEF (phi
, i
);
4413 error ("missing PHI def");
4414 debug_generic_stmt (phi
);
4418 /* Addressable variables do have SSA_NAMEs but they
4419 are not considered gimple values. */
4420 else if (TREE_CODE (t
) != SSA_NAME
4421 && TREE_CODE (t
) != FUNCTION_DECL
4422 && !is_gimple_min_invariant (t
))
4424 error ("PHI def is not a GIMPLE value");
4425 debug_generic_stmt (phi
);
4426 debug_generic_stmt (t
);
4430 addr
= walk_tree (&t
, verify_node_sharing
, visited
, NULL
);
4433 error ("incorrect sharing of tree nodes");
4434 debug_generic_stmt (phi
);
4435 debug_generic_stmt (addr
);
4441 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); )
4443 tree stmt
= bsi_stmt (bsi
);
4445 pointer_set_insert (visited_stmts
, stmt
);
4446 err
|= verify_gimple_tuples (stmt
);
4448 if (bb_for_stmt (stmt
) != bb
)
4450 error ("bb_for_stmt (stmt) is set to a wrong basic block");
4455 err
|= verify_stmt (stmt
, bsi_end_p (bsi
));
4456 addr
= walk_tree (&stmt
, verify_node_sharing
, visited
, NULL
);
4459 error ("incorrect sharing of tree nodes");
4460 debug_generic_stmt (stmt
);
4461 debug_generic_stmt (addr
);
4466 eh_error_found
= false;
4467 if (get_eh_throw_stmt_table (cfun
))
4468 htab_traverse (get_eh_throw_stmt_table (cfun
),
4469 verify_eh_throw_stmt_node
,
4472 if (err
| eh_error_found
)
4473 internal_error ("verify_stmts failed");
4475 pointer_set_destroy (visited
);
4476 pointer_set_destroy (visited_stmts
);
4477 verify_histograms ();
4478 timevar_pop (TV_TREE_STMT_VERIFY
);
4482 /* Verifies that the flow information is OK. */
4485 tree_verify_flow_info (void)
4489 block_stmt_iterator bsi
;
4494 if (ENTRY_BLOCK_PTR
->il
.tree
)
4496 error ("ENTRY_BLOCK has IL associated with it");
4500 if (EXIT_BLOCK_PTR
->il
.tree
)
4502 error ("EXIT_BLOCK has IL associated with it");
4506 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
4507 if (e
->flags
& EDGE_FALLTHRU
)
4509 error ("fallthru to exit from bb %d", e
->src
->index
);
4515 bool found_ctrl_stmt
= false;
4519 /* Skip labels on the start of basic block. */
4520 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
4522 tree prev_stmt
= stmt
;
4524 stmt
= bsi_stmt (bsi
);
4526 if (TREE_CODE (stmt
) != LABEL_EXPR
)
4529 if (prev_stmt
&& DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
)))
4531 error ("nonlocal label ");
4532 print_generic_expr (stderr
, LABEL_EXPR_LABEL (stmt
), 0);
4533 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
4538 if (label_to_block (LABEL_EXPR_LABEL (stmt
)) != bb
)
4541 print_generic_expr (stderr
, LABEL_EXPR_LABEL (stmt
), 0);
4542 fprintf (stderr
, " to block does not match in bb %d",
4547 if (decl_function_context (LABEL_EXPR_LABEL (stmt
))
4548 != current_function_decl
)
4551 print_generic_expr (stderr
, LABEL_EXPR_LABEL (stmt
), 0);
4552 fprintf (stderr
, " has incorrect context in bb %d",
4558 /* Verify that body of basic block BB is free of control flow. */
4559 for (; !bsi_end_p (bsi
); bsi_next (&bsi
))
4561 tree stmt
= bsi_stmt (bsi
);
4563 if (found_ctrl_stmt
)
4565 error ("control flow in the middle of basic block %d",
4570 if (stmt_ends_bb_p (stmt
))
4571 found_ctrl_stmt
= true;
4573 if (TREE_CODE (stmt
) == LABEL_EXPR
)
4576 print_generic_expr (stderr
, LABEL_EXPR_LABEL (stmt
), 0);
4577 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
4582 bsi
= bsi_last (bb
);
4583 if (bsi_end_p (bsi
))
4586 stmt
= bsi_stmt (bsi
);
4588 err
|= verify_eh_edges (stmt
);
4590 if (is_ctrl_stmt (stmt
))
4592 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4593 if (e
->flags
& EDGE_FALLTHRU
)
4595 error ("fallthru edge after a control statement in bb %d",
4601 if (TREE_CODE (stmt
) != COND_EXPR
)
4603 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4604 after anything else but if statement. */
4605 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4606 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
4608 error ("true/false edge after a non-COND_EXPR in bb %d",
4614 switch (TREE_CODE (stmt
))
4621 if (COND_EXPR_THEN (stmt
) != NULL_TREE
4622 || COND_EXPR_ELSE (stmt
) != NULL_TREE
)
4624 error ("COND_EXPR with code in branches at the end of bb %d",
4629 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
4631 if (!true_edge
|| !false_edge
4632 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
4633 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
4634 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
4635 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
4636 || EDGE_COUNT (bb
->succs
) >= 3)
4638 error ("wrong outgoing edge flags at end of bb %d",
4646 if (simple_goto_p (stmt
))
4648 error ("explicit goto at end of bb %d", bb
->index
);
4653 /* FIXME. We should double check that the labels in the
4654 destination blocks have their address taken. */
4655 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4656 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
4657 | EDGE_FALSE_VALUE
))
4658 || !(e
->flags
& EDGE_ABNORMAL
))
4660 error ("wrong outgoing edge flags at end of bb %d",
4668 if (!single_succ_p (bb
)
4669 || (single_succ_edge (bb
)->flags
4670 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
4671 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
4673 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
4676 if (single_succ (bb
) != EXIT_BLOCK_PTR
)
4678 error ("return edge does not point to exit in bb %d",
4691 vec
= SWITCH_LABELS (stmt
);
4692 n
= TREE_VEC_LENGTH (vec
);
4694 /* Mark all the destination basic blocks. */
4695 for (i
= 0; i
< n
; ++i
)
4697 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
4698 basic_block label_bb
= label_to_block (lab
);
4700 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
4701 label_bb
->aux
= (void *)1;
4704 /* Verify that the case labels are sorted. */
4705 prev
= TREE_VEC_ELT (vec
, 0);
4706 for (i
= 1; i
< n
; ++i
)
4708 tree c
= TREE_VEC_ELT (vec
, i
);
4713 error ("found default case not at end of case vector");
4718 if (! tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
4720 error ("case labels not sorted: ");
4721 print_generic_expr (stderr
, prev
, 0);
4722 fprintf (stderr
," is greater than ");
4723 print_generic_expr (stderr
, c
, 0);
4724 fprintf (stderr
," but comes before it.\n");
4729 /* VRP will remove the default case if it can prove it will
4730 never be executed. So do not verify there always exists
4731 a default case here. */
4733 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4737 error ("extra outgoing edge %d->%d",
4738 bb
->index
, e
->dest
->index
);
4741 e
->dest
->aux
= (void *)2;
4742 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
4743 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
4745 error ("wrong outgoing edge flags at end of bb %d",
4751 /* Check that we have all of them. */
4752 for (i
= 0; i
< n
; ++i
)
4754 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
4755 basic_block label_bb
= label_to_block (lab
);
4757 if (label_bb
->aux
!= (void *)2)
4759 error ("missing edge %i->%i",
4760 bb
->index
, label_bb
->index
);
4765 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4766 e
->dest
->aux
= (void *)0;
4773 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
4774 verify_dominators (CDI_DOMINATORS
);
4780 /* Updates phi nodes after creating a forwarder block joined
4781 by edge FALLTHRU. */
4784 tree_make_forwarder_block (edge fallthru
)
4788 basic_block dummy
, bb
;
4789 tree phi
, new_phi
, var
;
4791 dummy
= fallthru
->src
;
4792 bb
= fallthru
->dest
;
4794 if (single_pred_p (bb
))
4797 /* If we redirected a branch we must create new PHI nodes at the
4799 for (phi
= phi_nodes (dummy
); phi
; phi
= PHI_CHAIN (phi
))
4801 var
= PHI_RESULT (phi
);
4802 new_phi
= create_phi_node (var
, bb
);
4803 SSA_NAME_DEF_STMT (var
) = new_phi
;
4804 SET_PHI_RESULT (phi
, make_ssa_name (SSA_NAME_VAR (var
), phi
));
4805 add_phi_arg (new_phi
, PHI_RESULT (phi
), fallthru
);
4808 /* Ensure that the PHI node chain is in the same order. */
4809 set_phi_nodes (bb
, phi_reverse (phi_nodes (bb
)));
4811 /* Add the arguments we have stored on edges. */
4812 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4817 flush_pending_stmts (e
);
4822 /* Return a non-special label in the head of basic block BLOCK.
4823 Create one if it doesn't exist. */
4826 tree_block_label (basic_block bb
)
4828 block_stmt_iterator i
, s
= bsi_start (bb
);
4832 for (i
= s
; !bsi_end_p (i
); first
= false, bsi_next (&i
))
4834 stmt
= bsi_stmt (i
);
4835 if (TREE_CODE (stmt
) != LABEL_EXPR
)
4837 label
= LABEL_EXPR_LABEL (stmt
);
4838 if (!DECL_NONLOCAL (label
))
4841 bsi_move_before (&i
, &s
);
4846 label
= create_artificial_label ();
4847 stmt
= build1 (LABEL_EXPR
, void_type_node
, label
);
4848 bsi_insert_before (&s
, stmt
, BSI_NEW_STMT
);
4853 /* Attempt to perform edge redirection by replacing a possibly complex
4854 jump instruction by a goto or by removing the jump completely.
4855 This can apply only if all edges now point to the same block. The
4856 parameters and return values are equivalent to
4857 redirect_edge_and_branch. */
4860 tree_try_redirect_by_replacing_jump (edge e
, basic_block target
)
4862 basic_block src
= e
->src
;
4863 block_stmt_iterator b
;
4866 /* We can replace or remove a complex jump only when we have exactly
4868 if (EDGE_COUNT (src
->succs
) != 2
4869 /* Verify that all targets will be TARGET. Specifically, the
4870 edge that is not E must also go to TARGET. */
4871 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
4877 stmt
= bsi_stmt (b
);
4879 if (TREE_CODE (stmt
) == COND_EXPR
4880 || TREE_CODE (stmt
) == SWITCH_EXPR
)
4882 bsi_remove (&b
, true);
4883 e
= ssa_redirect_edge (e
, target
);
4884 e
->flags
= EDGE_FALLTHRU
;
4892 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4893 edge representing the redirected branch. */
4896 tree_redirect_edge_and_branch (edge e
, basic_block dest
)
4898 basic_block bb
= e
->src
;
4899 block_stmt_iterator bsi
;
4903 if (e
->flags
& EDGE_ABNORMAL
)
4906 if (e
->src
!= ENTRY_BLOCK_PTR
4907 && (ret
= tree_try_redirect_by_replacing_jump (e
, dest
)))
4910 if (e
->dest
== dest
)
4913 bsi
= bsi_last (bb
);
4914 stmt
= bsi_end_p (bsi
) ? NULL
: bsi_stmt (bsi
);
4916 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
4919 /* For COND_EXPR, we only need to redirect the edge. */
4923 /* No non-abnormal edges should lead from a non-simple goto, and
4924 simple ones should be represented implicitly. */
4929 tree cases
= get_cases_for_edge (e
, stmt
);
4930 tree label
= tree_block_label (dest
);
4932 /* If we have a list of cases associated with E, then use it
4933 as it's a lot faster than walking the entire case vector. */
4936 edge e2
= find_edge (e
->src
, dest
);
4943 CASE_LABEL (cases
) = label
;
4944 cases
= TREE_CHAIN (cases
);
4947 /* If there was already an edge in the CFG, then we need
4948 to move all the cases associated with E to E2. */
4951 tree cases2
= get_cases_for_edge (e2
, stmt
);
4953 TREE_CHAIN (last
) = TREE_CHAIN (cases2
);
4954 TREE_CHAIN (cases2
) = first
;
4959 tree vec
= SWITCH_LABELS (stmt
);
4960 size_t i
, n
= TREE_VEC_LENGTH (vec
);
4962 for (i
= 0; i
< n
; i
++)
4964 tree elt
= TREE_VEC_ELT (vec
, i
);
4966 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
4967 CASE_LABEL (elt
) = label
;
4975 bsi_remove (&bsi
, true);
4976 e
->flags
|= EDGE_FALLTHRU
;
4981 case OMP_SECTIONS_SWITCH
:
4983 /* The edges from OMP constructs can be simply redirected. */
4987 /* Otherwise it must be a fallthru edge, and we don't need to
4988 do anything besides redirecting it. */
4989 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
4993 /* Update/insert PHI nodes as necessary. */
4995 /* Now update the edges in the CFG. */
4996 e
= ssa_redirect_edge (e
, dest
);
5001 /* Returns true if it is possible to remove edge E by redirecting
5002 it to the destination of the other edge from E->src. */
5005 tree_can_remove_branch_p (const_edge e
)
5007 if (e
->flags
& EDGE_ABNORMAL
)
5013 /* Simple wrapper, as we can always redirect fallthru edges. */
5016 tree_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5018 e
= tree_redirect_edge_and_branch (e
, dest
);
5025 /* Splits basic block BB after statement STMT (but at least after the
5026 labels). If STMT is NULL, BB is split just after the labels. */
5029 tree_split_block (basic_block bb
, void *stmt
)
5031 block_stmt_iterator bsi
;
5032 tree_stmt_iterator tsi_tgt
;
5038 new_bb
= create_empty_bb (bb
);
5040 /* Redirect the outgoing edges. */
5041 new_bb
->succs
= bb
->succs
;
5043 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5046 if (stmt
&& TREE_CODE ((tree
) stmt
) == LABEL_EXPR
)
5049 /* Move everything from BSI to the new basic block. */
5050 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
5052 act
= bsi_stmt (bsi
);
5053 if (TREE_CODE (act
) == LABEL_EXPR
)
5066 if (bsi_end_p (bsi
))
5069 /* Split the statement list - avoid re-creating new containers as this
5070 brings ugly quadratic memory consumption in the inliner.
5071 (We are still quadratic since we need to update stmt BB pointers,
5073 list
= tsi_split_statement_list_before (&bsi
.tsi
);
5074 set_bb_stmt_list (new_bb
, list
);
5075 for (tsi_tgt
= tsi_start (list
);
5076 !tsi_end_p (tsi_tgt
); tsi_next (&tsi_tgt
))
5077 change_bb_for_stmt (tsi_stmt (tsi_tgt
), new_bb
);
5083 /* Moves basic block BB after block AFTER. */
5086 tree_move_block_after (basic_block bb
, basic_block after
)
5088 if (bb
->prev_bb
== after
)
5092 link_block (bb
, after
);
5098 /* Return true if basic_block can be duplicated. */
5101 tree_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
5107 /* Create a duplicate of the basic block BB. NOTE: This does not
5108 preserve SSA form. */
5111 tree_duplicate_bb (basic_block bb
)
5114 block_stmt_iterator bsi
, bsi_tgt
;
5117 new_bb
= create_empty_bb (EXIT_BLOCK_PTR
->prev_bb
);
5119 /* Copy the PHI nodes. We ignore PHI node arguments here because
5120 the incoming edges have not been setup yet. */
5121 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
5123 tree copy
= create_phi_node (PHI_RESULT (phi
), new_bb
);
5124 create_new_def_for (PHI_RESULT (copy
), copy
, PHI_RESULT_PTR (copy
));
5127 /* Keep the chain of PHI nodes in the same order so that they can be
5128 updated by ssa_redirect_edge. */
5129 set_phi_nodes (new_bb
, phi_reverse (phi_nodes (new_bb
)));
5131 bsi_tgt
= bsi_start (new_bb
);
5132 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
5134 def_operand_p def_p
;
5135 ssa_op_iter op_iter
;
5139 stmt
= bsi_stmt (bsi
);
5140 if (TREE_CODE (stmt
) == LABEL_EXPR
)
5143 /* Create a new copy of STMT and duplicate STMT's virtual
5145 copy
= unshare_expr (stmt
);
5146 bsi_insert_after (&bsi_tgt
, copy
, BSI_NEW_STMT
);
5147 copy_virtual_operands (copy
, stmt
);
5148 region
= lookup_stmt_eh_region (stmt
);
5150 add_stmt_to_eh_region (copy
, region
);
5151 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
5153 /* Create new names for all the definitions created by COPY and
5154 add replacement mappings for each new name. */
5155 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
5156 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
5162 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5165 add_phi_args_after_copy_edge (edge e_copy
)
5167 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
5170 tree phi
, phi_copy
, phi_next
, def
;
5172 if (!phi_nodes (e_copy
->dest
))
5175 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
5177 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
5178 dest
= get_bb_original (e_copy
->dest
);
5180 dest
= e_copy
->dest
;
5182 e
= find_edge (bb
, dest
);
5185 /* During loop unrolling the target of the latch edge is copied.
5186 In this case we are not looking for edge to dest, but to
5187 duplicated block whose original was dest. */
5188 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5190 if ((e
->dest
->flags
& BB_DUPLICATED
)
5191 && get_bb_original (e
->dest
) == dest
)
5195 gcc_assert (e
!= NULL
);
5198 for (phi
= phi_nodes (e
->dest
), phi_copy
= phi_nodes (e_copy
->dest
);
5200 phi
= phi_next
, phi_copy
= PHI_CHAIN (phi_copy
))
5202 phi_next
= PHI_CHAIN (phi
);
5203 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
5204 add_phi_arg (phi_copy
, def
, e_copy
);
5209 /* Basic block BB_COPY was created by code duplication. Add phi node
5210 arguments for edges going out of BB_COPY. The blocks that were
5211 duplicated have BB_DUPLICATED set. */
5214 add_phi_args_after_copy_bb (basic_block bb_copy
)
5219 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
5221 add_phi_args_after_copy_edge (e_copy
);
5225 /* Blocks in REGION_COPY array of length N_REGION were created by
5226 duplication of basic blocks. Add phi node arguments for edges
5227 going from these blocks. If E_COPY is not NULL, also add
5228 phi node arguments for its destination.*/
5231 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
5236 for (i
= 0; i
< n_region
; i
++)
5237 region_copy
[i
]->flags
|= BB_DUPLICATED
;
5239 for (i
= 0; i
< n_region
; i
++)
5240 add_phi_args_after_copy_bb (region_copy
[i
]);
5242 add_phi_args_after_copy_edge (e_copy
);
5244 for (i
= 0; i
< n_region
; i
++)
5245 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
5248 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5249 important exit edge EXIT. By important we mean that no SSA name defined
5250 inside region is live over the other exit edges of the region. All entry
5251 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5252 to the duplicate of the region. SSA form, dominance and loop information
5253 is updated. The new basic blocks are stored to REGION_COPY in the same
5254 order as they had in REGION, provided that REGION_COPY is not NULL.
5255 The function returns false if it is unable to copy the region,
5259 tree_duplicate_sese_region (edge entry
, edge exit
,
5260 basic_block
*region
, unsigned n_region
,
5261 basic_block
*region_copy
)
5264 bool free_region_copy
= false, copying_header
= false;
5265 struct loop
*loop
= entry
->dest
->loop_father
;
5267 VEC (basic_block
, heap
) *doms
;
5269 int total_freq
= 0, entry_freq
= 0;
5270 gcov_type total_count
= 0, entry_count
= 0;
5272 if (!can_copy_bbs_p (region
, n_region
))
5275 /* Some sanity checking. Note that we do not check for all possible
5276 missuses of the functions. I.e. if you ask to copy something weird,
5277 it will work, but the state of structures probably will not be
5279 for (i
= 0; i
< n_region
; i
++)
5281 /* We do not handle subloops, i.e. all the blocks must belong to the
5283 if (region
[i
]->loop_father
!= loop
)
5286 if (region
[i
] != entry
->dest
5287 && region
[i
] == loop
->header
)
5291 set_loop_copy (loop
, loop
);
5293 /* In case the function is used for loop header copying (which is the primary
5294 use), ensure that EXIT and its copy will be new latch and entry edges. */
5295 if (loop
->header
== entry
->dest
)
5297 copying_header
= true;
5298 set_loop_copy (loop
, loop_outer (loop
));
5300 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
5303 for (i
= 0; i
< n_region
; i
++)
5304 if (region
[i
] != exit
->src
5305 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
5311 region_copy
= XNEWVEC (basic_block
, n_region
);
5312 free_region_copy
= true;
5315 gcc_assert (!need_ssa_update_p ());
5317 /* Record blocks outside the region that are dominated by something
5320 initialize_original_copy_tables ();
5322 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
5324 if (entry
->dest
->count
)
5326 total_count
= entry
->dest
->count
;
5327 entry_count
= entry
->count
;
5328 /* Fix up corner cases, to avoid division by zero or creation of negative
5330 if (entry_count
> total_count
)
5331 entry_count
= total_count
;
5335 total_freq
= entry
->dest
->frequency
;
5336 entry_freq
= EDGE_FREQUENCY (entry
);
5337 /* Fix up corner cases, to avoid division by zero or creation of negative
5339 if (total_freq
== 0)
5341 else if (entry_freq
> total_freq
)
5342 entry_freq
= total_freq
;
5345 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
5346 split_edge_bb_loc (entry
));
5349 scale_bbs_frequencies_gcov_type (region
, n_region
,
5350 total_count
- entry_count
,
5352 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
5357 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
5359 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
5364 loop
->header
= exit
->dest
;
5365 loop
->latch
= exit
->src
;
5368 /* Redirect the entry and add the phi node arguments. */
5369 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
5370 gcc_assert (redirected
!= NULL
);
5371 flush_pending_stmts (entry
);
5373 /* Concerning updating of dominators: We must recount dominators
5374 for entry block and its copy. Anything that is outside of the
5375 region, but was dominated by something inside needs recounting as
5377 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
5378 VEC_safe_push (basic_block
, heap
, doms
, get_bb_original (entry
->dest
));
5379 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
5380 VEC_free (basic_block
, heap
, doms
);
5382 /* Add the other PHI node arguments. */
5383 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
5385 /* Update the SSA web. */
5386 update_ssa (TODO_update_ssa
);
5388 if (free_region_copy
)
5391 free_original_copy_tables ();
5395 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5396 are stored to REGION_COPY in the same order in that they appear
5397 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5398 the region, EXIT an exit from it. The condition guarding EXIT
5399 is moved to ENTRY. Returns true if duplication succeeds, false
5425 tree_duplicate_sese_tail (edge entry
, edge exit
,
5426 basic_block
*region
, unsigned n_region
,
5427 basic_block
*region_copy
)
5430 bool free_region_copy
= false;
5431 struct loop
*loop
= exit
->dest
->loop_father
;
5432 struct loop
*orig_loop
= entry
->dest
->loop_father
;
5433 basic_block switch_bb
, entry_bb
, nentry_bb
;
5434 VEC (basic_block
, heap
) *doms
;
5435 int total_freq
= 0, exit_freq
= 0;
5436 gcov_type total_count
= 0, exit_count
= 0;
5437 edge exits
[2], nexits
[2], e
;
5438 block_stmt_iterator bsi
;
5442 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
5444 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
5446 if (!can_copy_bbs_p (region
, n_region
))
5449 /* Some sanity checking. Note that we do not check for all possible
5450 missuses of the functions. I.e. if you ask to copy something weird
5451 (e.g., in the example, if there is a jump from inside to the middle
5452 of some_code, or come_code defines some of the values used in cond)
5453 it will work, but the resulting code will not be correct. */
5454 for (i
= 0; i
< n_region
; i
++)
5456 /* We do not handle subloops, i.e. all the blocks must belong to the
5458 if (region
[i
]->loop_father
!= orig_loop
)
5461 if (region
[i
] == orig_loop
->latch
)
5465 initialize_original_copy_tables ();
5466 set_loop_copy (orig_loop
, loop
);
5470 region_copy
= XNEWVEC (basic_block
, n_region
);
5471 free_region_copy
= true;
5474 gcc_assert (!need_ssa_update_p ());
5476 /* Record blocks outside the region that are dominated by something
5478 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
5480 if (exit
->src
->count
)
5482 total_count
= exit
->src
->count
;
5483 exit_count
= exit
->count
;
5484 /* Fix up corner cases, to avoid division by zero or creation of negative
5486 if (exit_count
> total_count
)
5487 exit_count
= total_count
;
5491 total_freq
= exit
->src
->frequency
;
5492 exit_freq
= EDGE_FREQUENCY (exit
);
5493 /* Fix up corner cases, to avoid division by zero or creation of negative
5495 if (total_freq
== 0)
5497 if (exit_freq
> total_freq
)
5498 exit_freq
= total_freq
;
5501 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
5502 split_edge_bb_loc (exit
));
5505 scale_bbs_frequencies_gcov_type (region
, n_region
,
5506 total_count
- exit_count
,
5508 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, exit_count
,
5513 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
5515 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
5518 /* Create the switch block, and put the exit condition to it. */
5519 entry_bb
= entry
->dest
;
5520 nentry_bb
= get_bb_copy (entry_bb
);
5521 if (!last_stmt (entry
->src
)
5522 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
5523 switch_bb
= entry
->src
;
5525 switch_bb
= split_edge (entry
);
5526 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
5528 bsi
= bsi_last (switch_bb
);
5529 cond
= last_stmt (exit
->src
);
5530 gcc_assert (TREE_CODE (cond
) == COND_EXPR
);
5531 bsi_insert_after (&bsi
, unshare_expr (cond
), BSI_NEW_STMT
);
5533 sorig
= single_succ_edge (switch_bb
);
5534 sorig
->flags
= exits
[1]->flags
;
5535 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
5537 /* Register the new edge from SWITCH_BB in loop exit lists. */
5538 rescan_loop_exit (snew
, true, false);
5540 /* Add the PHI node arguments. */
5541 add_phi_args_after_copy (region_copy
, n_region
, snew
);
5543 /* Get rid of now superfluous conditions and associated edges (and phi node
5545 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
5546 PENDING_STMT (e
) = NULL_TREE
;
5547 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
5548 PENDING_STMT (e
) = NULL_TREE
;
5550 /* Anything that is outside of the region, but was dominated by something
5551 inside needs to update dominance info. */
5552 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
5553 VEC_free (basic_block
, heap
, doms
);
5555 /* Update the SSA web. */
5556 update_ssa (TODO_update_ssa
);
5558 if (free_region_copy
)
5561 free_original_copy_tables ();
5566 DEF_VEC_P(basic_block);
5567 DEF_VEC_ALLOC_P(basic_block,heap);
5570 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5571 adding blocks when the dominator traversal reaches EXIT. This
5572 function silently assumes that ENTRY strictly dominates EXIT. */
5575 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
5576 VEC(basic_block
,heap
) **bbs_p
)
5580 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
5582 son
= next_dom_son (CDI_DOMINATORS
, son
))
5584 VEC_safe_push (basic_block
, heap
, *bbs_p
, son
);
5586 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
5590 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5591 The duplicates are recorded in VARS_MAP. */
5594 replace_by_duplicate_decl (tree
*tp
, struct pointer_map_t
*vars_map
,
5597 tree t
= *tp
, new_t
;
5598 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
5601 if (DECL_CONTEXT (t
) == to_context
)
5604 loc
= pointer_map_contains (vars_map
, t
);
5608 loc
= pointer_map_insert (vars_map
, t
);
5612 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
5613 f
->local_decls
= tree_cons (NULL_TREE
, new_t
, f
->local_decls
);
5617 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
5618 new_t
= copy_node (t
);
5620 DECL_CONTEXT (new_t
) = to_context
;
5625 new_t
= (tree
) *loc
;
5630 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5631 VARS_MAP maps old ssa names and var_decls to the new ones. */
5634 replace_ssa_name (tree name
, struct pointer_map_t
*vars_map
,
5638 tree new_name
, decl
= SSA_NAME_VAR (name
);
5640 gcc_assert (is_gimple_reg (name
));
5642 loc
= pointer_map_contains (vars_map
, name
);
5646 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
5648 push_cfun (DECL_STRUCT_FUNCTION (to_context
));
5649 if (gimple_in_ssa_p (cfun
))
5650 add_referenced_var (decl
);
5652 new_name
= make_ssa_name (decl
, SSA_NAME_DEF_STMT (name
));
5653 if (SSA_NAME_IS_DEFAULT_DEF (name
))
5654 set_default_def (decl
, new_name
);
5657 loc
= pointer_map_insert (vars_map
, name
);
5661 new_name
= (tree
) *loc
;
5672 struct pointer_map_t
*vars_map
;
5673 htab_t new_label_map
;
5677 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5678 contained in *TP if it has been ORIG_BLOCK previously and change the
5679 DECL_CONTEXT of every local variable referenced in *TP. */
5682 move_stmt_r (tree
*tp
, int *walk_subtrees
, void *data
)
5684 struct move_stmt_d
*p
= (struct move_stmt_d
*) data
;
5687 if (EXPR_P (t
) || GIMPLE_STMT_P (t
))
5689 tree block
= TREE_BLOCK (t
);
5690 if (p
->orig_block
== NULL_TREE
5691 || block
== p
->orig_block
5692 || block
== NULL_TREE
)
5693 TREE_BLOCK (t
) = p
->new_block
;
5694 #ifdef ENABLE_CHECKING
5695 else if (block
!= p
->new_block
)
5697 while (block
&& block
!= p
->orig_block
)
5698 block
= BLOCK_SUPERCONTEXT (block
);
5704 if (OMP_DIRECTIVE_P (t
)
5705 && TREE_CODE (t
) != OMP_RETURN
5706 && TREE_CODE (t
) != OMP_CONTINUE
)
5708 /* Do not remap variables inside OMP directives. Variables
5709 referenced in clauses and directive header belong to the
5710 parent function and should not be moved into the child
5712 bool save_remap_decls_p
= p
->remap_decls_p
;
5713 p
->remap_decls_p
= false;
5716 walk_tree (&OMP_BODY (t
), move_stmt_r
, p
, NULL
);
5718 p
->remap_decls_p
= save_remap_decls_p
;
5720 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
5722 if (TREE_CODE (t
) == SSA_NAME
)
5723 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
5724 else if (TREE_CODE (t
) == LABEL_DECL
)
5726 if (p
->new_label_map
)
5728 struct tree_map in
, *out
;
5730 out
= (struct tree_map
*)
5731 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
5736 DECL_CONTEXT (t
) = p
->to_context
;
5738 else if (p
->remap_decls_p
)
5740 /* Replace T with its duplicate. T should no longer appear in the
5741 parent function, so this looks wasteful; however, it may appear
5742 in referenced_vars, and more importantly, as virtual operands of
5743 statements, and in alias lists of other variables. It would be
5744 quite difficult to expunge it from all those places. ??? It might
5745 suffice to do this for addressable variables. */
5746 if ((TREE_CODE (t
) == VAR_DECL
5747 && !is_global_var (t
))
5748 || TREE_CODE (t
) == CONST_DECL
)
5749 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
5752 && gimple_in_ssa_p (cfun
))
5754 push_cfun (DECL_STRUCT_FUNCTION (p
->to_context
));
5755 add_referenced_var (*tp
);
5761 else if (TYPE_P (t
))
5767 /* Marks virtual operands of all statements in basic blocks BBS for
5771 mark_virtual_ops_in_bb (basic_block bb
)
5774 block_stmt_iterator bsi
;
5776 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
5777 mark_virtual_ops_for_renaming (phi
);
5779 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
5780 mark_virtual_ops_for_renaming (bsi_stmt (bsi
));
5783 /* Marks virtual operands of all statements in basic blocks BBS for
5787 mark_virtual_ops_in_region (VEC (basic_block
,heap
) *bbs
)
5792 for (i
= 0; VEC_iterate (basic_block
, bbs
, i
, bb
); i
++)
5793 mark_virtual_ops_in_bb (bb
);
5796 /* Move basic block BB from function CFUN to function DEST_FN. The
5797 block is moved out of the original linked list and placed after
5798 block AFTER in the new list. Also, the block is removed from the
5799 original array of blocks and placed in DEST_FN's array of blocks.
5800 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5801 updated to reflect the moved edges.
5803 The local variables are remapped to new instances, VARS_MAP is used
5804 to record the mapping. */
5807 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
5808 basic_block after
, bool update_edge_count_p
,
5809 struct move_stmt_d
*d
, int eh_offset
)
5811 struct control_flow_graph
*cfg
;
5814 block_stmt_iterator si
;
5815 unsigned old_len
, new_len
;
5818 /* Remove BB from dominance structures. */
5819 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
5821 remove_bb_from_loops (bb
);
5823 /* Link BB to the new linked list. */
5824 move_block_after (bb
, after
);
5826 /* Update the edge count in the corresponding flowgraphs. */
5827 if (update_edge_count_p
)
5828 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5830 cfun
->cfg
->x_n_edges
--;
5831 dest_cfun
->cfg
->x_n_edges
++;
5834 /* Remove BB from the original basic block array. */
5835 VEC_replace (basic_block
, cfun
->cfg
->x_basic_block_info
, bb
->index
, NULL
);
5836 cfun
->cfg
->x_n_basic_blocks
--;
5838 /* Grow DEST_CFUN's basic block array if needed. */
5839 cfg
= dest_cfun
->cfg
;
5840 cfg
->x_n_basic_blocks
++;
5841 if (bb
->index
>= cfg
->x_last_basic_block
)
5842 cfg
->x_last_basic_block
= bb
->index
+ 1;
5844 old_len
= VEC_length (basic_block
, cfg
->x_basic_block_info
);
5845 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
5847 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
5848 VEC_safe_grow_cleared (basic_block
, gc
, cfg
->x_basic_block_info
,
5852 VEC_replace (basic_block
, cfg
->x_basic_block_info
,
5855 /* Remap the variables in phi nodes. */
5856 for (phi
= phi_nodes (bb
); phi
; phi
= next_phi
)
5859 tree op
= PHI_RESULT (phi
);
5862 next_phi
= PHI_CHAIN (phi
);
5863 if (!is_gimple_reg (op
))
5865 /* Remove the phi nodes for virtual operands (alias analysis will be
5866 run for the new function, anyway). */
5867 remove_phi_node (phi
, NULL
, true);
5871 SET_PHI_RESULT (phi
,
5872 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
5873 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
5875 op
= USE_FROM_PTR (use
);
5876 if (TREE_CODE (op
) == SSA_NAME
)
5877 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
5881 for (si
= bsi_start (bb
); !bsi_end_p (si
); bsi_next (&si
))
5883 tree stmt
= bsi_stmt (si
);
5886 walk_tree (&stmt
, move_stmt_r
, d
, NULL
);
5888 if (TREE_CODE (stmt
) == LABEL_EXPR
)
5890 tree label
= LABEL_EXPR_LABEL (stmt
);
5891 int uid
= LABEL_DECL_UID (label
);
5893 gcc_assert (uid
> -1);
5895 old_len
= VEC_length (basic_block
, cfg
->x_label_to_block_map
);
5896 if (old_len
<= (unsigned) uid
)
5898 new_len
= 3 * uid
/ 2;
5899 VEC_safe_grow_cleared (basic_block
, gc
,
5900 cfg
->x_label_to_block_map
, new_len
);
5903 VEC_replace (basic_block
, cfg
->x_label_to_block_map
, uid
, bb
);
5904 VEC_replace (basic_block
, cfun
->cfg
->x_label_to_block_map
, uid
, NULL
);
5906 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
5908 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
5909 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
5911 else if (TREE_CODE (stmt
) == RESX_EXPR
&& eh_offset
!= 0)
5912 TREE_OPERAND (stmt
, 0) =
5913 build_int_cst (NULL_TREE
,
5914 TREE_INT_CST_LOW (TREE_OPERAND (stmt
, 0))
5917 region
= lookup_stmt_eh_region (stmt
);
5920 add_stmt_to_eh_region_fn (dest_cfun
, stmt
, region
+ eh_offset
);
5921 remove_stmt_from_eh_region (stmt
);
5922 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
5923 gimple_remove_stmt_histograms (cfun
, stmt
);
5926 /* We cannot leave any operands allocated from the operand caches of
5927 the current function. */
5928 free_stmt_operands (stmt
);
5929 push_cfun (dest_cfun
);
5935 /* Examine the statements in BB (which is in SRC_CFUN); find and return
5936 the outermost EH region. Use REGION as the incoming base EH region. */
5939 find_outermost_region_in_block (struct function
*src_cfun
,
5940 basic_block bb
, int region
)
5942 block_stmt_iterator si
;
5944 for (si
= bsi_start (bb
); !bsi_end_p (si
); bsi_next (&si
))
5946 tree stmt
= bsi_stmt (si
);
5949 if (TREE_CODE (stmt
) == RESX_EXPR
)
5950 stmt_region
= TREE_INT_CST_LOW (TREE_OPERAND (stmt
, 0));
5952 stmt_region
= lookup_stmt_eh_region_fn (src_cfun
, stmt
);
5953 if (stmt_region
> 0)
5956 region
= stmt_region
;
5957 else if (stmt_region
!= region
)
5959 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
5960 gcc_assert (region
!= -1);
5969 new_label_mapper (tree decl
, void *data
)
5971 htab_t hash
= (htab_t
) data
;
5975 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
5977 m
= XNEW (struct tree_map
);
5978 m
->hash
= DECL_UID (decl
);
5979 m
->base
.from
= decl
;
5980 m
->to
= create_artificial_label ();
5981 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
5982 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
5983 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
5985 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
5986 gcc_assert (*slot
== NULL
);
5993 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
5997 replace_block_vars_by_duplicates (tree block
, struct pointer_map_t
*vars_map
,
6002 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &TREE_CHAIN (*tp
))
6005 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
6008 if (TREE_CODE (*tp
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (*tp
))
6010 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (*tp
));
6011 DECL_HAS_VALUE_EXPR_P (t
) = 1;
6013 TREE_CHAIN (t
) = TREE_CHAIN (*tp
);
6018 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
6019 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
6022 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6023 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6024 single basic block in the original CFG and the new basic block is
6025 returned. DEST_CFUN must not have a CFG yet.
6027 Note that the region need not be a pure SESE region. Blocks inside
6028 the region may contain calls to abort/exit. The only restriction
6029 is that ENTRY_BB should be the only entry point and it must
6032 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6033 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6034 to the new function.
6036 All local variables referenced in the region are assumed to be in
6037 the corresponding BLOCK_VARS and unexpanded variable lists
6038 associated with DEST_CFUN. */
6041 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
6042 basic_block exit_bb
, tree orig_block
)
6044 VEC(basic_block
,heap
) *bbs
, *dom_bbs
;
6045 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
6046 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
6047 struct function
*saved_cfun
= cfun
;
6048 int *entry_flag
, *exit_flag
, eh_offset
;
6049 unsigned *entry_prob
, *exit_prob
;
6050 unsigned i
, num_entry_edges
, num_exit_edges
;
6053 htab_t new_label_map
;
6054 struct pointer_map_t
*vars_map
;
6055 struct loop
*loop
= entry_bb
->loop_father
;
6056 struct move_stmt_d d
;
6058 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6060 gcc_assert (entry_bb
!= exit_bb
6062 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
6064 /* Collect all the blocks in the region. Manually add ENTRY_BB
6065 because it won't be added by dfs_enumerate_from. */
6067 VEC_safe_push (basic_block
, heap
, bbs
, entry_bb
);
6068 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
6070 /* The blocks that used to be dominated by something in BBS will now be
6071 dominated by the new block. */
6072 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
6073 VEC_address (basic_block
, bbs
),
6074 VEC_length (basic_block
, bbs
));
6076 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6077 the predecessor edges to ENTRY_BB and the successor edges to
6078 EXIT_BB so that we can re-attach them to the new basic block that
6079 will replace the region. */
6080 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
6081 entry_pred
= (basic_block
*) xcalloc (num_entry_edges
, sizeof (basic_block
));
6082 entry_flag
= (int *) xcalloc (num_entry_edges
, sizeof (int));
6083 entry_prob
= XNEWVEC (unsigned, num_entry_edges
);
6085 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
6087 entry_prob
[i
] = e
->probability
;
6088 entry_flag
[i
] = e
->flags
;
6089 entry_pred
[i
++] = e
->src
;
6095 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
6096 exit_succ
= (basic_block
*) xcalloc (num_exit_edges
,
6097 sizeof (basic_block
));
6098 exit_flag
= (int *) xcalloc (num_exit_edges
, sizeof (int));
6099 exit_prob
= XNEWVEC (unsigned, num_exit_edges
);
6101 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
6103 exit_prob
[i
] = e
->probability
;
6104 exit_flag
[i
] = e
->flags
;
6105 exit_succ
[i
++] = e
->dest
;
6117 /* Switch context to the child function to initialize DEST_FN's CFG. */
6118 gcc_assert (dest_cfun
->cfg
== NULL
);
6119 push_cfun (dest_cfun
);
6121 init_empty_tree_cfg ();
6123 /* Initialize EH information for the new function. */
6125 new_label_map
= NULL
;
6130 for (i
= 0; VEC_iterate (basic_block
, bbs
, i
, bb
); i
++)
6131 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
6133 init_eh_for_function ();
6136 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
6137 eh_offset
= duplicate_eh_regions (saved_cfun
, new_label_mapper
,
6138 new_label_map
, region
, 0);
6144 /* The ssa form for virtual operands in the source function will have to
6145 be repaired. We do not care for the real operands -- the sese region
6146 must be closed with respect to those. */
6147 mark_virtual_ops_in_region (bbs
);
6149 /* Move blocks from BBS into DEST_CFUN. */
6150 gcc_assert (VEC_length (basic_block
, bbs
) >= 2);
6151 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
6152 vars_map
= pointer_map_create ();
6154 memset (&d
, 0, sizeof (d
));
6155 d
.vars_map
= vars_map
;
6156 d
.from_context
= cfun
->decl
;
6157 d
.to_context
= dest_cfun
->decl
;
6158 d
.new_label_map
= new_label_map
;
6159 d
.remap_decls_p
= true;
6160 d
.orig_block
= orig_block
;
6161 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
6163 for (i
= 0; VEC_iterate (basic_block
, bbs
, i
, bb
); i
++)
6165 /* No need to update edge counts on the last block. It has
6166 already been updated earlier when we detached the region from
6167 the original CFG. */
6168 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
, eh_offset
);
6172 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6176 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
6178 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
6179 = BLOCK_SUBBLOCKS (orig_block
);
6180 for (block
= BLOCK_SUBBLOCKS (orig_block
);
6181 block
; block
= BLOCK_CHAIN (block
))
6182 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
6183 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
6186 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
6187 vars_map
, dest_cfun
->decl
);
6190 htab_delete (new_label_map
);
6191 pointer_map_destroy (vars_map
);
6193 /* Rewire the entry and exit blocks. The successor to the entry
6194 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6195 the child function. Similarly, the predecessor of DEST_FN's
6196 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6197 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6198 various CFG manipulation function get to the right CFG.
6200 FIXME, this is silly. The CFG ought to become a parameter to
6202 push_cfun (dest_cfun
);
6203 make_edge (ENTRY_BLOCK_PTR
, entry_bb
, EDGE_FALLTHRU
);
6205 make_edge (exit_bb
, EXIT_BLOCK_PTR
, 0);
6208 /* Back in the original function, the SESE region has disappeared,
6209 create a new basic block in its place. */
6210 bb
= create_empty_bb (entry_pred
[0]);
6212 add_bb_to_loop (bb
, loop
);
6213 for (i
= 0; i
< num_entry_edges
; i
++)
6215 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
6216 e
->probability
= entry_prob
[i
];
6219 for (i
= 0; i
< num_exit_edges
; i
++)
6221 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
6222 e
->probability
= exit_prob
[i
];
6225 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
6226 for (i
= 0; VEC_iterate (basic_block
, dom_bbs
, i
, abb
); i
++)
6227 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
6228 VEC_free (basic_block
, heap
, dom_bbs
);
6239 VEC_free (basic_block
, heap
, bbs
);
6245 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
6248 dump_function_to_file (tree fn
, FILE *file
, int flags
)
6250 tree arg
, vars
, var
;
6251 struct function
*dsf
;
6252 bool ignore_topmost_bind
= false, any_var
= false;
6256 fprintf (file
, "%s (", lang_hooks
.decl_printable_name (fn
, 2));
6258 arg
= DECL_ARGUMENTS (fn
);
6261 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
6262 fprintf (file
, " ");
6263 print_generic_expr (file
, arg
, dump_flags
);
6264 if (flags
& TDF_VERBOSE
)
6265 print_node (file
, "", arg
, 4);
6266 if (TREE_CHAIN (arg
))
6267 fprintf (file
, ", ");
6268 arg
= TREE_CHAIN (arg
);
6270 fprintf (file
, ")\n");
6272 if (flags
& TDF_VERBOSE
)
6273 print_node (file
, "", fn
, 2);
6275 dsf
= DECL_STRUCT_FUNCTION (fn
);
6276 if (dsf
&& (flags
& TDF_DETAILS
))
6277 dump_eh_tree (file
, dsf
);
6279 if (flags
& TDF_RAW
)
6281 dump_node (fn
, TDF_SLIM
| flags
, file
);
6285 /* Switch CFUN to point to FN. */
6286 push_cfun (DECL_STRUCT_FUNCTION (fn
));
6288 /* When GIMPLE is lowered, the variables are no longer available in
6289 BIND_EXPRs, so display them separately. */
6290 if (cfun
&& cfun
->decl
== fn
&& cfun
->local_decls
)
6292 ignore_topmost_bind
= true;
6294 fprintf (file
, "{\n");
6295 for (vars
= cfun
->local_decls
; vars
; vars
= TREE_CHAIN (vars
))
6297 var
= TREE_VALUE (vars
);
6299 print_generic_decl (file
, var
, flags
);
6300 if (flags
& TDF_VERBOSE
)
6301 print_node (file
, "", var
, 4);
6302 fprintf (file
, "\n");
6308 if (cfun
&& cfun
->decl
== fn
&& cfun
->cfg
&& basic_block_info
)
6310 /* Make a CFG based dump. */
6311 check_bb_profile (ENTRY_BLOCK_PTR
, file
);
6312 if (!ignore_topmost_bind
)
6313 fprintf (file
, "{\n");
6315 if (any_var
&& n_basic_blocks
)
6316 fprintf (file
, "\n");
6319 dump_generic_bb (file
, bb
, 2, flags
);
6321 fprintf (file
, "}\n");
6322 check_bb_profile (EXIT_BLOCK_PTR
, file
);
6328 /* Make a tree based dump. */
6329 chain
= DECL_SAVED_TREE (fn
);
6331 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
6333 if (ignore_topmost_bind
)
6335 chain
= BIND_EXPR_BODY (chain
);
6343 if (!ignore_topmost_bind
)
6344 fprintf (file
, "{\n");
6349 fprintf (file
, "\n");
6351 print_generic_stmt_indented (file
, chain
, flags
, indent
);
6352 if (ignore_topmost_bind
)
6353 fprintf (file
, "}\n");
6356 fprintf (file
, "\n\n");
6363 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6366 debug_function (tree fn
, int flags
)
6368 dump_function_to_file (fn
, stderr
, flags
);
6372 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6375 print_pred_bbs (FILE *file
, basic_block bb
)
6380 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
6381 fprintf (file
, "bb_%d ", e
->src
->index
);
6385 /* Print on FILE the indexes for the successors of basic_block BB. */
6388 print_succ_bbs (FILE *file
, basic_block bb
)
6393 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6394 fprintf (file
, "bb_%d ", e
->dest
->index
);
6397 /* Print to FILE the basic block BB following the VERBOSITY level. */
6400 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
6402 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
6403 memset ((void *) s_indent
, ' ', (size_t) indent
);
6404 s_indent
[indent
] = '\0';
6406 /* Print basic_block's header. */
6409 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
6410 print_pred_bbs (file
, bb
);
6411 fprintf (file
, "}, succs = {");
6412 print_succ_bbs (file
, bb
);
6413 fprintf (file
, "})\n");
6416 /* Print basic_block's body. */
6419 fprintf (file
, "%s {\n", s_indent
);
6420 tree_dump_bb (bb
, file
, indent
+ 4);
6421 fprintf (file
, "%s }\n", s_indent
);
6425 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
6427 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
6428 VERBOSITY level this outputs the contents of the loop, or just its
6432 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
6440 s_indent
= (char *) alloca ((size_t) indent
+ 1);
6441 memset ((void *) s_indent
, ' ', (size_t) indent
);
6442 s_indent
[indent
] = '\0';
6444 /* Print loop's header. */
6445 fprintf (file
, "%sloop_%d (header = %d, latch = %d", s_indent
,
6446 loop
->num
, loop
->header
->index
, loop
->latch
->index
);
6447 fprintf (file
, ", niter = ");
6448 print_generic_expr (file
, loop
->nb_iterations
, 0);
6450 if (loop
->any_upper_bound
)
6452 fprintf (file
, ", upper_bound = ");
6453 dump_double_int (file
, loop
->nb_iterations_upper_bound
, true);
6456 if (loop
->any_estimate
)
6458 fprintf (file
, ", estimate = ");
6459 dump_double_int (file
, loop
->nb_iterations_estimate
, true);
6461 fprintf (file
, ")\n");
6463 /* Print loop's body. */
6466 fprintf (file
, "%s{\n", s_indent
);
6468 if (bb
->loop_father
== loop
)
6469 print_loops_bb (file
, bb
, indent
, verbosity
);
6471 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
6472 fprintf (file
, "%s}\n", s_indent
);
6476 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6477 spaces. Following VERBOSITY level this outputs the contents of the
6478 loop, or just its structure. */
6481 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
6486 print_loop (file
, loop
, indent
, verbosity
);
6487 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
6490 /* Follow a CFG edge from the entry point of the program, and on entry
6491 of a loop, pretty print the loop structure on FILE. */
6494 print_loops (FILE *file
, int verbosity
)
6498 bb
= BASIC_BLOCK (NUM_FIXED_BLOCKS
);
6499 if (bb
&& bb
->loop_father
)
6500 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
6504 /* Debugging loops structure at tree level, at some VERBOSITY level. */
6507 debug_loops (int verbosity
)
6509 print_loops (stderr
, verbosity
);
6512 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6515 debug_loop (struct loop
*loop
, int verbosity
)
6517 print_loop (stderr
, loop
, 0, verbosity
);
6520 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6524 debug_loop_num (unsigned num
, int verbosity
)
6526 debug_loop (get_loop (num
), verbosity
);
6529 /* Return true if BB ends with a call, possibly followed by some
6530 instructions that must stay with the call. Return false,
6534 tree_block_ends_with_call_p (basic_block bb
)
6536 block_stmt_iterator bsi
= bsi_last (bb
);
6537 return get_call_expr_in (bsi_stmt (bsi
)) != NULL
;
6541 /* Return true if BB ends with a conditional branch. Return false,
6545 tree_block_ends_with_condjump_p (const_basic_block bb
)
6547 /* This CONST_CAST is okay because last_stmt doesn't modify its
6548 argument and the return value is not modified. */
6549 const_tree stmt
= last_stmt (CONST_CAST_BB(bb
));
6550 return (stmt
&& TREE_CODE (stmt
) == COND_EXPR
);
6554 /* Return true if we need to add fake edge to exit at statement T.
6555 Helper function for tree_flow_call_edges_add. */
6558 need_fake_edge_p (tree t
)
6560 tree call
, fndecl
= NULL_TREE
;
6563 /* NORETURN and LONGJMP calls already have an edge to exit.
6564 CONST and PURE calls do not need one.
6565 We don't currently check for CONST and PURE here, although
6566 it would be a good idea, because those attributes are
6567 figured out from the RTL in mark_constant_function, and
6568 the counter incrementation code from -fprofile-arcs
6569 leads to different results from -fbranch-probabilities. */
6570 call
= get_call_expr_in (t
);
6573 fndecl
= get_callee_fndecl (call
);
6574 call_flags
= call_expr_flags (call
);
6577 if (call
&& fndecl
&& DECL_BUILT_IN (fndecl
)
6578 && (call_flags
& ECF_NOTHROW
)
6579 && !(call_flags
& ECF_NORETURN
)
6580 && !(call_flags
& ECF_RETURNS_TWICE
))
6583 if (call
&& !(call_flags
& ECF_NORETURN
))
6586 if (TREE_CODE (t
) == ASM_EXPR
6587 && (ASM_VOLATILE_P (t
) || ASM_INPUT_P (t
)))
6594 /* Add fake edges to the function exit for any non constant and non
6595 noreturn calls, volatile inline assembly in the bitmap of blocks
6596 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
6597 the number of blocks that were split.
6599 The goal is to expose cases in which entering a basic block does
6600 not imply that all subsequent instructions must be executed. */
6603 tree_flow_call_edges_add (sbitmap blocks
)
6606 int blocks_split
= 0;
6607 int last_bb
= last_basic_block
;
6608 bool check_last_block
= false;
6610 if (n_basic_blocks
== NUM_FIXED_BLOCKS
)
6614 check_last_block
= true;
6616 check_last_block
= TEST_BIT (blocks
, EXIT_BLOCK_PTR
->prev_bb
->index
);
6618 /* In the last basic block, before epilogue generation, there will be
6619 a fallthru edge to EXIT. Special care is required if the last insn
6620 of the last basic block is a call because make_edge folds duplicate
6621 edges, which would result in the fallthru edge also being marked
6622 fake, which would result in the fallthru edge being removed by
6623 remove_fake_edges, which would result in an invalid CFG.
6625 Moreover, we can't elide the outgoing fake edge, since the block
6626 profiler needs to take this into account in order to solve the minimal
6627 spanning tree in the case that the call doesn't return.
6629 Handle this by adding a dummy instruction in a new last basic block. */
6630 if (check_last_block
)
6632 basic_block bb
= EXIT_BLOCK_PTR
->prev_bb
;
6633 block_stmt_iterator bsi
= bsi_last (bb
);
6635 if (!bsi_end_p (bsi
))
6638 if (t
&& need_fake_edge_p (t
))
6642 e
= find_edge (bb
, EXIT_BLOCK_PTR
);
6645 bsi_insert_on_edge (e
, build_empty_stmt ());
6646 bsi_commit_edge_inserts ();
6651 /* Now add fake edges to the function exit for any non constant
6652 calls since there is no way that we can determine if they will
6654 for (i
= 0; i
< last_bb
; i
++)
6656 basic_block bb
= BASIC_BLOCK (i
);
6657 block_stmt_iterator bsi
;
6658 tree stmt
, last_stmt
;
6663 if (blocks
&& !TEST_BIT (blocks
, i
))
6666 bsi
= bsi_last (bb
);
6667 if (!bsi_end_p (bsi
))
6669 last_stmt
= bsi_stmt (bsi
);
6672 stmt
= bsi_stmt (bsi
);
6673 if (need_fake_edge_p (stmt
))
6676 /* The handling above of the final block before the
6677 epilogue should be enough to verify that there is
6678 no edge to the exit block in CFG already.
6679 Calling make_edge in such case would cause us to
6680 mark that edge as fake and remove it later. */
6681 #ifdef ENABLE_CHECKING
6682 if (stmt
== last_stmt
)
6684 e
= find_edge (bb
, EXIT_BLOCK_PTR
);
6685 gcc_assert (e
== NULL
);
6689 /* Note that the following may create a new basic block
6690 and renumber the existing basic blocks. */
6691 if (stmt
!= last_stmt
)
6693 e
= split_block (bb
, stmt
);
6697 make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_FAKE
);
6701 while (!bsi_end_p (bsi
));
6706 verify_flow_info ();
6708 return blocks_split
;
6711 /* Purge dead abnormal call edges from basic block BB. */
6714 tree_purge_dead_abnormal_call_edges (basic_block bb
)
6716 bool changed
= tree_purge_dead_eh_edges (bb
);
6718 if (cfun
->has_nonlocal_label
)
6720 tree stmt
= last_stmt (bb
);
6724 if (!(stmt
&& tree_can_make_abnormal_goto (stmt
)))
6725 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
6727 if (e
->flags
& EDGE_ABNORMAL
)
6736 /* See tree_purge_dead_eh_edges below. */
6738 free_dominance_info (CDI_DOMINATORS
);
6744 /* Stores all basic blocks dominated by BB to DOM_BBS. */
6747 get_all_dominated_blocks (basic_block bb
, VEC (basic_block
, heap
) **dom_bbs
)
6751 VEC_safe_push (basic_block
, heap
, *dom_bbs
, bb
);
6752 for (son
= first_dom_son (CDI_DOMINATORS
, bb
);
6754 son
= next_dom_son (CDI_DOMINATORS
, son
))
6755 get_all_dominated_blocks (son
, dom_bbs
);
6758 /* Removes edge E and all the blocks dominated by it, and updates dominance
6759 information. The IL in E->src needs to be updated separately.
6760 If dominance info is not available, only the edge E is removed.*/
6763 remove_edge_and_dominated_blocks (edge e
)
6765 VEC (basic_block
, heap
) *bbs_to_remove
= NULL
;
6766 VEC (basic_block
, heap
) *bbs_to_fix_dom
= NULL
;
6770 bool none_removed
= false;
6772 basic_block bb
, dbb
;
6775 if (!dom_info_available_p (CDI_DOMINATORS
))
6781 /* No updating is needed for edges to exit. */
6782 if (e
->dest
== EXIT_BLOCK_PTR
)
6784 if (cfgcleanup_altered_bbs
)
6785 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
6790 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6791 that is not dominated by E->dest, then this set is empty. Otherwise,
6792 all the basic blocks dominated by E->dest are removed.
6794 Also, to DF_IDOM we store the immediate dominators of the blocks in
6795 the dominance frontier of E (i.e., of the successors of the
6796 removed blocks, if there are any, and of E->dest otherwise). */
6797 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
6802 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
6804 none_removed
= true;
6809 df
= BITMAP_ALLOC (NULL
);
6810 df_idom
= BITMAP_ALLOC (NULL
);
6813 bitmap_set_bit (df_idom
,
6814 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
6817 get_all_dominated_blocks (e
->dest
, &bbs_to_remove
);
6818 for (i
= 0; VEC_iterate (basic_block
, bbs_to_remove
, i
, bb
); i
++)
6820 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
6822 if (f
->dest
!= EXIT_BLOCK_PTR
)
6823 bitmap_set_bit (df
, f
->dest
->index
);
6826 for (i
= 0; VEC_iterate (basic_block
, bbs_to_remove
, i
, bb
); i
++)
6827 bitmap_clear_bit (df
, bb
->index
);
6829 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
6831 bb
= BASIC_BLOCK (i
);
6832 bitmap_set_bit (df_idom
,
6833 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
6837 if (cfgcleanup_altered_bbs
)
6839 /* Record the set of the altered basic blocks. */
6840 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
6841 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
6844 /* Remove E and the cancelled blocks. */
6849 for (i
= 0; VEC_iterate (basic_block
, bbs_to_remove
, i
, bb
); i
++)
6850 delete_basic_block (bb
);
6853 /* Update the dominance information. The immediate dominator may change only
6854 for blocks whose immediate dominator belongs to DF_IDOM:
6856 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6857 removal. Let Z the arbitrary block such that idom(Z) = Y and
6858 Z dominates X after the removal. Before removal, there exists a path P
6859 from Y to X that avoids Z. Let F be the last edge on P that is
6860 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6861 dominates W, and because of P, Z does not dominate W), and W belongs to
6862 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6863 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
6865 bb
= BASIC_BLOCK (i
);
6866 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
6868 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
6869 VEC_safe_push (basic_block
, heap
, bbs_to_fix_dom
, dbb
);
6872 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
6875 BITMAP_FREE (df_idom
);
6876 VEC_free (basic_block
, heap
, bbs_to_remove
);
6877 VEC_free (basic_block
, heap
, bbs_to_fix_dom
);
6880 /* Purge dead EH edges from basic block BB. */
6883 tree_purge_dead_eh_edges (basic_block bb
)
6885 bool changed
= false;
6888 tree stmt
= last_stmt (bb
);
6890 if (stmt
&& tree_can_throw_internal (stmt
))
6893 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
6895 if (e
->flags
& EDGE_EH
)
6897 remove_edge_and_dominated_blocks (e
);
6908 tree_purge_all_dead_eh_edges (const_bitmap blocks
)
6910 bool changed
= false;
6914 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
6916 changed
|= tree_purge_dead_eh_edges (BASIC_BLOCK (i
));
6922 /* This function is called whenever a new edge is created or
6926 tree_execute_on_growing_pred (edge e
)
6928 basic_block bb
= e
->dest
;
6931 reserve_phi_args_for_new_edge (bb
);
6934 /* This function is called immediately before edge E is removed from
6935 the edge vector E->dest->preds. */
6938 tree_execute_on_shrinking_pred (edge e
)
6940 if (phi_nodes (e
->dest
))
6941 remove_phi_args (e
);
6944 /*---------------------------------------------------------------------------
6945 Helper functions for Loop versioning
6946 ---------------------------------------------------------------------------*/
6948 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
6949 of 'first'. Both of them are dominated by 'new_head' basic block. When
6950 'new_head' was created by 'second's incoming edge it received phi arguments
6951 on the edge by split_edge(). Later, additional edge 'e' was created to
6952 connect 'new_head' and 'first'. Now this routine adds phi args on this
6953 additional edge 'e' that new_head to second edge received as part of edge
6958 tree_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
6959 basic_block new_head
, edge e
)
6962 edge e2
= find_edge (new_head
, second
);
6964 /* Because NEW_HEAD has been created by splitting SECOND's incoming
6965 edge, we should always have an edge from NEW_HEAD to SECOND. */
6966 gcc_assert (e2
!= NULL
);
6968 /* Browse all 'second' basic block phi nodes and add phi args to
6969 edge 'e' for 'first' head. PHI args are always in correct order. */
6971 for (phi2
= phi_nodes (second
), phi1
= phi_nodes (first
);
6973 phi2
= PHI_CHAIN (phi2
), phi1
= PHI_CHAIN (phi1
))
6975 tree def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
6976 add_phi_arg (phi1
, def
, e
);
6980 /* Adds a if else statement to COND_BB with condition COND_EXPR.
6981 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
6982 the destination of the ELSE part. */
6984 tree_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
6985 basic_block second_head ATTRIBUTE_UNUSED
,
6986 basic_block cond_bb
, void *cond_e
)
6988 block_stmt_iterator bsi
;
6989 tree new_cond_expr
= NULL_TREE
;
6990 tree cond_expr
= (tree
) cond_e
;
6993 /* Build new conditional expr */
6994 new_cond_expr
= build3 (COND_EXPR
, void_type_node
, cond_expr
,
6995 NULL_TREE
, NULL_TREE
);
6997 /* Add new cond in cond_bb. */
6998 bsi
= bsi_start (cond_bb
);
6999 bsi_insert_after (&bsi
, new_cond_expr
, BSI_NEW_STMT
);
7000 /* Adjust edges appropriately to connect new head with first head
7001 as well as second head. */
7002 e0
= single_succ_edge (cond_bb
);
7003 e0
->flags
&= ~EDGE_FALLTHRU
;
7004 e0
->flags
|= EDGE_FALSE_VALUE
;
7007 struct cfg_hooks tree_cfg_hooks
= {
7009 tree_verify_flow_info
,
7010 tree_dump_bb
, /* dump_bb */
7011 create_bb
, /* create_basic_block */
7012 tree_redirect_edge_and_branch
,/* redirect_edge_and_branch */
7013 tree_redirect_edge_and_branch_force
,/* redirect_edge_and_branch_force */
7014 tree_can_remove_branch_p
, /* can_remove_branch_p */
7015 remove_bb
, /* delete_basic_block */
7016 tree_split_block
, /* split_block */
7017 tree_move_block_after
, /* move_block_after */
7018 tree_can_merge_blocks_p
, /* can_merge_blocks_p */
7019 tree_merge_blocks
, /* merge_blocks */
7020 tree_predict_edge
, /* predict_edge */
7021 tree_predicted_by_p
, /* predicted_by_p */
7022 tree_can_duplicate_bb_p
, /* can_duplicate_block_p */
7023 tree_duplicate_bb
, /* duplicate_block */
7024 tree_split_edge
, /* split_edge */
7025 tree_make_forwarder_block
, /* make_forward_block */
7026 NULL
, /* tidy_fallthru_edge */
7027 tree_block_ends_with_call_p
, /* block_ends_with_call_p */
7028 tree_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
7029 tree_flow_call_edges_add
, /* flow_call_edges_add */
7030 tree_execute_on_growing_pred
, /* execute_on_growing_pred */
7031 tree_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
7032 tree_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
7033 tree_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
7034 tree_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
7035 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
7036 flush_pending_stmts
/* flush_pending_stmts */
7040 /* Split all critical edges. */
7043 split_critical_edges (void)
7049 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7050 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7051 mappings around the calls to split_edge. */
7052 start_recording_case_labels ();
7055 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7056 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
7061 end_recording_case_labels ();
7065 struct gimple_opt_pass pass_split_crit_edges
=
7069 "crited", /* name */
7071 split_critical_edges
, /* execute */
7074 0, /* static_pass_number */
7075 TV_TREE_SPLIT_EDGES
, /* tv_id */
7076 PROP_cfg
, /* properties required */
7077 PROP_no_crit_edges
, /* properties_provided */
7078 0, /* properties_destroyed */
7079 0, /* todo_flags_start */
7080 TODO_dump_func
/* todo_flags_finish */
7085 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
7086 a temporary, make sure and register it to be renamed if necessary,
7087 and finally return the temporary. Put the statements to compute
7088 EXP before the current statement in BSI. */
7091 gimplify_val (block_stmt_iterator
*bsi
, tree type
, tree exp
)
7093 tree t
, new_stmt
, orig_stmt
;
7095 if (is_gimple_val (exp
))
7098 t
= make_rename_temp (type
, NULL
);
7099 new_stmt
= build_gimple_modify_stmt (t
, exp
);
7101 orig_stmt
= bsi_stmt (*bsi
);
7102 SET_EXPR_LOCUS (new_stmt
, EXPR_LOCUS (orig_stmt
));
7103 TREE_BLOCK (new_stmt
) = TREE_BLOCK (orig_stmt
);
7105 bsi_insert_before (bsi
, new_stmt
, BSI_SAME_STMT
);
7106 if (gimple_in_ssa_p (cfun
))
7107 mark_symbols_for_renaming (new_stmt
);
7112 /* Build a ternary operation and gimplify it. Emit code before BSI.
7113 Return the gimple_val holding the result. */
7116 gimplify_build3 (block_stmt_iterator
*bsi
, enum tree_code code
,
7117 tree type
, tree a
, tree b
, tree c
)
7121 ret
= fold_build3 (code
, type
, a
, b
, c
);
7124 return gimplify_val (bsi
, type
, ret
);
7127 /* Build a binary operation and gimplify it. Emit code before BSI.
7128 Return the gimple_val holding the result. */
7131 gimplify_build2 (block_stmt_iterator
*bsi
, enum tree_code code
,
7132 tree type
, tree a
, tree b
)
7136 ret
= fold_build2 (code
, type
, a
, b
);
7139 return gimplify_val (bsi
, type
, ret
);
7142 /* Build a unary operation and gimplify it. Emit code before BSI.
7143 Return the gimple_val holding the result. */
7146 gimplify_build1 (block_stmt_iterator
*bsi
, enum tree_code code
, tree type
,
7151 ret
= fold_build1 (code
, type
, a
);
7154 return gimplify_val (bsi
, type
, ret
);
7159 /* Emit return warnings. */
7162 execute_warn_function_return (void)
7164 source_location location
;
7169 /* If we have a path to EXIT, then we do return. */
7170 if (TREE_THIS_VOLATILE (cfun
->decl
)
7171 && EDGE_COUNT (EXIT_BLOCK_PTR
->preds
) > 0)
7173 location
= UNKNOWN_LOCATION
;
7174 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
7176 last
= last_stmt (e
->src
);
7177 if (TREE_CODE (last
) == RETURN_EXPR
7178 && (location
= EXPR_LOCATION (last
)) != UNKNOWN_LOCATION
)
7181 if (location
== UNKNOWN_LOCATION
)
7182 location
= cfun
->function_end_locus
;
7183 warning (0, "%H%<noreturn%> function does return", &location
);
7186 /* If we see "return;" in some basic block, then we do reach the end
7187 without returning a value. */
7188 else if (warn_return_type
7189 && !TREE_NO_WARNING (cfun
->decl
)
7190 && EDGE_COUNT (EXIT_BLOCK_PTR
->preds
) > 0
7191 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun
->decl
))))
7193 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
7195 tree last
= last_stmt (e
->src
);
7196 if (TREE_CODE (last
) == RETURN_EXPR
7197 && TREE_OPERAND (last
, 0) == NULL
7198 && !TREE_NO_WARNING (last
))
7200 location
= EXPR_LOCATION (last
);
7201 if (location
== UNKNOWN_LOCATION
)
7202 location
= cfun
->function_end_locus
;
7203 warning (OPT_Wreturn_type
, "%Hcontrol reaches end of non-void function", &location
);
7204 TREE_NO_WARNING (cfun
->decl
) = 1;
7213 /* Given a basic block B which ends with a conditional and has
7214 precisely two successors, determine which of the edges is taken if
7215 the conditional is true and which is taken if the conditional is
7216 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7219 extract_true_false_edges_from_block (basic_block b
,
7223 edge e
= EDGE_SUCC (b
, 0);
7225 if (e
->flags
& EDGE_TRUE_VALUE
)
7228 *false_edge
= EDGE_SUCC (b
, 1);
7233 *true_edge
= EDGE_SUCC (b
, 1);
7237 struct gimple_opt_pass pass_warn_function_return
=
7243 execute_warn_function_return
, /* execute */
7246 0, /* static_pass_number */
7248 PROP_cfg
, /* properties_required */
7249 0, /* properties_provided */
7250 0, /* properties_destroyed */
7251 0, /* todo_flags_start */
7252 0 /* todo_flags_finish */
7256 /* Emit noreturn warnings. */
7259 execute_warn_function_noreturn (void)
7261 if (warn_missing_noreturn
7262 && !TREE_THIS_VOLATILE (cfun
->decl
)
7263 && EDGE_COUNT (EXIT_BLOCK_PTR
->preds
) == 0
7264 && !lang_hooks
.missing_noreturn_ok_p (cfun
->decl
))
7265 warning (OPT_Wmissing_noreturn
, "%Jfunction might be possible candidate "
7266 "for attribute %<noreturn%>",
7271 struct gimple_opt_pass pass_warn_function_noreturn
=
7277 execute_warn_function_noreturn
, /* execute */
7280 0, /* static_pass_number */
7282 PROP_cfg
, /* properties_required */
7283 0, /* properties_provided */
7284 0, /* properties_destroyed */
7285 0, /* todo_flags_start */
7286 0 /* todo_flags_finish */