/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
46 #include "cfglayout.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* Mapping of labels to their associated blocks.  This can greatly speed up
   building of the CFG in code with lots of gotos.  */
static GTY(()) varray_type label_to_block_map;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;
/* Basic blocks and flowgraphs.  */
static basic_block create_bb (void *, void *, basic_block);
static void create_block_annotation (basic_block);
static void free_blocks_annotations (void);
static void clear_blocks_annotations (void);
static void make_blocks (tree);
static void factor_computed_gotos (void);

static void make_edges (void);
static void make_ctrl_stmt_edges (basic_block);
static void make_exit_edges (basic_block);
static void make_cond_expr_edges (basic_block);
static void make_switch_expr_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static edge tree_redirect_edge_and_branch (edge, basic_block);
static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
static void split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (tree, tree);
static int tree_verify_flow_info (void);
static void tree_make_forwarder_block (edge);
static bool thread_jumps (void);
static bool tree_forwarder_block_p (basic_block);
static void bsi_commit_edge_inserts_1 (edge e);
static void tree_cfg2vcg (FILE *);

/* Flowgraph optimization and cleanup.  */
static void tree_merge_blocks (basic_block, basic_block);
static bool tree_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static bool cleanup_control_flow (void);
static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (tree, tree);
static bool phi_alternatives_equal (basic_block, edge, edge);
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  TP points to the list of
   statements to be added to the flowgraph.  */

static void
build_tree_cfg (tree *tp)
{
  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Initialize rbi_pool.  */
  alloc_rbi_pool ();

  /* Initialize the basic block array.  */
  init_flow ();
  profile_status = PROFILE_ABSENT;
  n_basic_blocks = 0;
  last_basic_block = 0;
  VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  /* Build a mapping of labels to their associated blocks.  */
  VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
		  "label to block map");

  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  found_computed_goto = 0;
  make_blocks (*tp);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == 0)
    create_empty_bb (ENTRY_BLOCK_PTR);

  create_block_annotation (ENTRY_BLOCK_PTR);
  create_block_annotation (EXIT_BLOCK_PTR);

  /* Adjust the size of the array.  */
  VARRAY_GROW (basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  make_edges ();

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (dump_file)
      {
	tree_cfg2vcg (dump_file);
	dump_end (TDI_vcg, dump_file);
      }
  }

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    dump_tree_cfg (dump_file, dump_flags);
}
static void
execute_build_cfg (void)
{
  build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
}

struct tree_opt_pass pass_build_cfg =
{
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts,			/* todo_flags_finish */
  0					/* letter */
};
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  tree factored_computed_goto_label = NULL;
  tree factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi = bsi_last (bb);
      tree last;

      /* Ignore empty blocks.  */
      if (bsi_end_p (bsi))
	continue;

      last = bsi_stmt (bsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  tree assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (! factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      block_stmt_iterator new_bsi = bsi_start (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label ();
	      factored_computed_goto_label
		= build1 (LABEL_EXPR, void_type_node, factored_label_decl);
	      bsi_insert_after (&new_bsi, factored_computed_goto_label,
				BSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
	      bsi_insert_after (&new_bsi, factored_computed_goto,
				BSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = build (MODIFY_EXPR, ptr_type_node,
			      var, GOTO_DESTINATION (last));
	  bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  GOTO_DESTINATION (last) = factored_label_decl;
	}
    }
}
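
/* For illustration only (not part of the original sources): given two
   blocks that end in computed gotos,

       goto *p;                         goto *q;

   the factoring above rewrites them as

       gotovar = p;                     gotovar = q;
       goto <factored_label>;           goto <factored_label>;

   with a single shared block

     <factored_label>:
       goto *gotovar;

   so only the factored block needs abnormal edges to every potential
   label, instead of every computed-goto block getting its own fan-out.  */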
/* Create annotations for a single basic block.  */

static void
create_block_annotation (basic_block bb)
{
  /* Verify that the tree_annotations field is clear.  */
  gcc_assert (!bb->tree_annotations);
  bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
}


/* Free the annotations for all the basic blocks.  */

static void free_blocks_annotations (void)
{
  clear_blocks_annotations ();
}


/* Clear the annotations for all the basic blocks.  */

static void
clear_blocks_annotations (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->tree_annotations = NULL;
}
/* Build a flowgraph for the statement_list STMT_LIST.  */

static void
make_blocks (tree stmt_list)
{
  tree_stmt_iterator i = tsi_start (stmt_list);
  tree stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_list = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!tsi_end_p (i))
    {
      tree prev_stmt;

      prev_stmt = stmt;
      stmt = tsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_list)
	    stmt_list = tsi_split_statement_list_before (&i);
	  bb = create_basic_block (stmt_list, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      set_bb_for_stmt (stmt, bb);

      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	start_new_block = true;

      tsi_next (&i);
      first_stmt_of_list = false;
    }
}
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  /* Create and initialize a new basic block.  */
  bb = alloc_block ();
  memset (bb, 0, sizeof (*bb));

  bb->index = last_basic_block;
  bb->stmt_list = h ? h : alloc_stmt_list ();

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VARRAY_GROW (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  BASIC_BLOCK (last_basic_block) = bb;

  create_block_annotation (bb);

  n_basic_blocks++;
  last_basic_block++;

  initialize_bb_rbi (bb);
  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  /* Traverse basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      tree first = first_stmt (bb);
      tree last = last_stmt (bb);

      if (first)
	{
	  /* Edges for statements that always alter flow control.  */
	  if (is_ctrl_stmt (last))
	    make_ctrl_stmt_edges (bb);

	  /* Edges for statements that sometimes alter flow control.  */
	  if (is_ctrl_altering_stmt (last))
	    make_exit_edges (bb);
	}

      /* Finally, if no edges were created above, this is a regular
	 basic block that only needs a fallthru edge.  */
      if (EDGE_COUNT (bb->succs) == 0)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* We do not care about fake edges, so remove any that the CFG
     builder inserted for completeness.  */
  remove_fake_exit_edges ();

  /* Clean up the graph and warn for unreachable code.  */
  cleanup_tree_cfg ();
}
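
/* Illustrative sketch, not part of the original file: a hypothetical
   debugging helper showing how the freshly built flowgraph can be walked
   with the same iterators used above.  The function name is made up and
   the code is disabled so it does not affect the build.  */
#if 0
static void
debug_print_succ_edges (FILE *file)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	fprintf (file, "bb %d -> bb %d%s\n", bb->index, e->dest->index,
		 (e->flags & EDGE_FALLTHRU) ? " (fallthru)" : "");
    }
}
#endif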
/* Create edges for control statement at basic block BB.  */

static void
make_ctrl_stmt_edges (basic_block bb)
{
  tree last = last_stmt (bb);

  switch (TREE_CODE (last))
    {
    case GOTO_EXPR:
      make_goto_expr_edges (bb);
      break;

    case RETURN_EXPR:
      make_edge (bb, EXIT_BLOCK_PTR, 0);
      break;

    case COND_EXPR:
      make_cond_expr_edges (bb);
      break;

    case SWITCH_EXPR:
      make_switch_expr_edges (bb);
      break;

    case RESX_EXPR:
      make_eh_edges (last);
      /* Yet another NORETURN hack.  */
      if (EDGE_COUNT (bb->succs) == 0)
	make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Create exit edges for statements in block BB that alter the flow of
   control.  Statements that alter the control flow are 'goto', 'return'
   and calls to non-returning functions.  */

static void
make_exit_edges (basic_block bb)
{
  tree last = last_stmt (bb), op;

  switch (TREE_CODE (last))
    {
    case CALL_EXPR:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (TREE_SIDE_EFFECTS (last)
	  && current_function_has_nonlocal_label)
	make_goto_expr_edges (bb);

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* Some calls are known not to return.  For such calls we create
	 a fake edge.

	 We really need to revamp how we build edges so that it's not
	 such a bloody pain to avoid creating edges for this case since
	 all we do is remove these edges when we're done building the
	 CFG.  */
      if (call_expr_flags (last) & (ECF_NORETURN | ECF_LONGJMP))
	{
	  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	  return;
	}

      /* Don't forget the fall-thru edge.  */
      make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      break;

    case MODIFY_EXPR:
      /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
	 may have an abnormal edge.  Search the RHS for this case and
	 create any required edges.  */
      op = get_call_expr_in (last);
      if (op && TREE_SIDE_EFFECTS (op)
	  && current_function_has_nonlocal_label)
	make_goto_expr_edges (bb);

      make_eh_edges (last);
      make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Create the edges for a COND_EXPR starting at block BB.
   At this point, both clauses must contain only simple gotos.  */

static void
make_cond_expr_edges (basic_block bb)
{
  tree entry = last_stmt (bb);
  basic_block then_bb, else_bb;
  tree then_label, else_label;

  gcc_assert (TREE_CODE (entry) == COND_EXPR);

  /* Entry basic blocks for each component.  */
  then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
  else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);

  make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  make_edge (bb, else_bb, EDGE_FALSE_VALUE);
}
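
/* Example for illustration (not from the original sources): after
   lowering, a conditional at the end of a block has the form

       if (cond) goto <L_then>; else goto <L_else>;

   so the two calls above attach an EDGE_TRUE_VALUE edge to the block
   holding <L_then> and an EDGE_FALSE_VALUE edge to the block holding
   <L_else>.  */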
/* Create the edges for a SWITCH_EXPR starting at block BB.
   At this point, the switch body has been lowered and the
   SWITCH_LABELS filled in, so this is in effect a multi-way branch.  */

static void
make_switch_expr_edges (basic_block bb)
{
  tree entry = last_stmt (bb);
  size_t i, n;
  tree vec;

  vec = SWITCH_LABELS (entry);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block (tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
      tree stmt;

      stmt = build1 (LABEL_EXPR, void_type_node, dest);
      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  return VARRAY_BB (label_to_block_map, uid);
}
/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  tree goto_t, dest;
  basic_block target_bb;
  int for_call;
  block_stmt_iterator last = bsi_last (bb);

  goto_t = bsi_stmt (last);

  /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
     CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
     from a nonlocal goto.  */
  if (TREE_CODE (goto_t) != GOTO_EXPR)
    {
      dest = error_mark_node;
      for_call = 1;
    }
  else
    {
      dest = GOTO_DESTINATION (goto_t);
      for_call = 0;

      /* A GOTO to a local label creates normal edges.  */
      if (simple_goto_p (goto_t))
	{
	  edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
#ifdef USE_MAPPED_LOCATION
	  e->goto_locus = EXPR_LOCATION (goto_t);
#else
	  e->goto_locus = EXPR_LOCUS (goto_t);
#endif
	  bsi_remove (&last);
	  return;
	}

      /* Nothing more to do for nonlocal gotos.  */
      if (TREE_CODE (dest) == LABEL_DECL)
	return;

      /* Computed gotos remain.  */
    }

  /* Look for the block starting with the destination label.  In the
     case of a computed goto, make an edge to any label block we find
     in the CFG.  */
  FOR_EACH_BB (target_bb)
    {
      block_stmt_iterator bsi;

      for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree target = bsi_stmt (bsi);

	  if (TREE_CODE (target) != LABEL_EXPR)
	    break;

	  if (
	      /* Computed GOTOs.  Make an edge to every label block that has
		 been marked as a potential target for a computed goto.  */
	      (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
	      /* Nonlocal GOTO target.  Make an edge to every label block
		 that has been marked as a potential target for a nonlocal
		 goto.  */
	      || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
	    {
	      make_edge (bb, target_bb, EDGE_ABNORMAL);
	      break;
	    }
	}
    }

  /* Degenerate case of computed goto with no labels.  */
  if (!for_call && EDGE_COUNT (bb->succs) == 0)
    make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}
/*---------------------------------------------------------------------------
---------------------------------------------------------------------------*/

/* Remove unreachable blocks and other miscellaneous clean up work.  */

bool
cleanup_tree_cfg (void)
{
  bool retval = false;

  timevar_push (TV_TREE_CLEANUP_CFG);

  retval = cleanup_control_flow ();
  retval |= delete_unreachable_blocks ();
  retval |= thread_jumps ();

#ifdef ENABLE_CHECKING
  if (retval)
    {
      gcc_assert (!cleanup_control_flow ());
      gcc_assert (!delete_unreachable_blocks ());
      gcc_assert (!thread_jumps ());
    }
#endif

  /* Merging the blocks creates no new opportunities for the other
     optimizations, so do it here.  */

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_TREE_CLEANUP_CFG);

  return retval;
}
/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes.

   We only run this pass once, running it more than once is probably not
   profitable.  */

/* A map from basic block index to the leading label of that block.  */
static tree *label_for_bb;
/* Callback for for_each_eh_region.  Helper for cleanup_dead_labels.  */

static void
update_eh_label (struct eh_region *region)
{
  tree old_label = get_eh_region_tree_label (region);
  if (old_label)
    {
      tree new_label;
      basic_block bb = label_to_block (old_label);

      /* ??? After optimizing, there may be EH regions with labels
	 that have already been removed from the function body, so
	 there is no basic block for them.  */
      if (! bb)
	return;

      new_label = label_for_bb[bb->index];
      set_eh_region_tree_label (region, new_label);
    }
}
/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!label_for_bb[bb->index])
    label_for_bb[bb->index] = label;
  return label_for_bb[bb->index];
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */
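
/* Worked example (illustration only, not part of the original sources):
   for a block that starts with

       L1:
       L2:
       L3:
	 x = 1;

   step 1 picks L1 as the leading label (preferring user labels over
   artificial ones), step 2 rewrites every goto/switch/EH reference to
   L2 or L3 so that it targets L1, and step 3 deletes the now unused
   artificial labels.  */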
void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = xcalloc (last_basic_block, sizeof (tree));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree label, stmt = bsi_stmt (i);

	  if (TREE_CODE (stmt) != LABEL_EXPR)
	    break;

	  label = LABEL_EXPR_LABEL (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (! label_for_bb[bb->index])
	    {
	      label_for_bb[bb->index] = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (! DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index]))
	    {
	      label_for_bb[bb->index] = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (!stmt)
	continue;

      switch (TREE_CODE (stmt))
	{
	case COND_EXPR:
	  {
	    tree true_branch, false_branch;

	    true_branch = COND_EXPR_THEN (stmt);
	    false_branch = COND_EXPR_ELSE (stmt);

	    GOTO_DESTINATION (true_branch)
	      = main_block_label (GOTO_DESTINATION (true_branch));
	    GOTO_DESTINATION (false_branch)
	      = main_block_label (GOTO_DESTINATION (false_branch));

	    break;
	  }

	case SWITCH_EXPR:
	  {
	    size_t i;
	    tree vec = SWITCH_LABELS (stmt);
	    size_t n = TREE_VEC_LENGTH (vec);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      CASE_LABEL (TREE_VEC_ELT (vec, i))
		= main_block_label (CASE_LABEL (TREE_VEC_ELT (vec, i)));

	    break;
	  }

	/* We have to handle GOTO_EXPRs until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GOTO_EXPR:
	  if (! computed_goto_p (stmt))
	    {
	      GOTO_DESTINATION (stmt)
		= main_block_label (GOTO_DESTINATION (stmt));
	      break;
	    }

	default:
	  break;
	}
    }

  for_each_eh_region (update_eh_label);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos are preserved.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index];

      if (! label_for_this_bb)
	continue;

      for (i = bsi_start (bb); !bsi_end_p (i); )
	{
	  tree label, stmt = bsi_stmt (i);

	  if (TREE_CODE (stmt) != LABEL_EXPR)
	    break;

	  label = LABEL_EXPR_LABEL (stmt);

	  if (label == label_for_this_bb
	      || ! DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label))
	    bsi_next (&i);
	  else
	    bsi_remove (&i);
	}
    }

  free (label_for_bb);
}
/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
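
/* Worked example (illustration only, not from the original sources):
   for the sorted label vector of

       switch (i) { case 1: case 2: case 3: goto A;
		    case 5: goto D; default: goto D; }

   the pass first drops "case 5" because it has the same destination as
   the default label, then merges the consecutive cases 1, 2 and 3 into
   a single "case 1 ... 3: goto A;" entry and shrinks the vector.  */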
void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
	{
	  tree labels = SWITCH_LABELS (stmt);
	  int old_size = TREE_VEC_LENGTH (labels);
	  int i, j, new_size = old_size;
	  tree default_case = TREE_VEC_ELT (labels, old_size - 1);
	  tree default_label;

	  /* The default label is always the last case in a switch
	     statement after gimplification.  */
	  default_label = CASE_LABEL (default_case);

	  /* Look for possible opportunities to merge cases.
	     Ignore the last element of the label vector because it
	     must be the default case.  */
	  i = 0;
	  while (i < old_size - 1)
	    {
	      tree base_case, base_label, base_high, type;
	      base_case = TREE_VEC_ELT (labels, i);

	      gcc_assert (base_case);
	      base_label = CASE_LABEL (base_case);

	      /* Discard cases that have the same destination as the
		 default case.  */
	      if (base_label == default_label)
		{
		  TREE_VEC_ELT (labels, i) = NULL_TREE;
		  i++;
		  new_size--;
		  continue;
		}

	      type = TREE_TYPE (CASE_LOW (base_case));
	      base_high = CASE_HIGH (base_case) ?
		CASE_HIGH (base_case) : CASE_LOW (base_case);
	      i++;

	      /* Try to merge case labels.  Break out when we reach the end
		 of the label vector or when we cannot merge the next case
		 label with the current one.  */
	      while (i < old_size - 1)
		{
		  tree merge_case = TREE_VEC_ELT (labels, i);
		  tree merge_label = CASE_LABEL (merge_case);
		  tree t = int_const_binop (PLUS_EXPR, base_high,
					    integer_one_node, 1);

		  /* Merge the cases if they jump to the same place,
		     and their ranges are consecutive.  */
		  if (merge_label == base_label
		      && tree_int_cst_equal (CASE_LOW (merge_case), t))
		    {
		      base_high = CASE_HIGH (merge_case) ?
			CASE_HIGH (merge_case) : CASE_LOW (merge_case);
		      CASE_HIGH (base_case) = base_high;
		      TREE_VEC_ELT (labels, i) = NULL_TREE;
		      new_size--;
		      i++;
		    }
		  else
		    break;
		}
	    }

	  /* Compress the case labels in the label vector, and adjust the
	     length of the vector.  */
	  for (i = 0, j = 0; i < new_size; i++)
	    {
	      while (! TREE_VEC_ELT (labels, j))
		j++;
	      TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
	    }
	  TREE_VEC_LENGTH (labels) = new_size;
	}
    }
}
/* Checks whether we can merge block B into block A.  */

static bool
tree_can_merge_blocks_p (basic_block a, basic_block b)
{
  tree stmt;
  block_stmt_iterator bsi;

  if (EDGE_COUNT (a->succs) != 1)
    return false;

  if (EDGE_SUCC (a, 0)->flags & EDGE_ABNORMAL)
    return false;

  if (EDGE_SUCC (a, 0)->dest != b)
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  if (EDGE_COUNT (b->preds) > 1)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt && TREE_CODE (stmt) == LABEL_EXPR
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
    return false;

  /* There may be no phi nodes at the start of b.  Most of these degenerate
     phi nodes should be cleaned up by kill_redundant_phi_nodes.  */
  if (phi_nodes (b))
    return false;

  /* Do not remove user labels.  */
  for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      stmt = bsi_stmt (bsi);
      if (TREE_CODE (stmt) != LABEL_EXPR)
	break;
      if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
	return false;
    }

  return true;
}
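
/* Illustrative sketch, not part of the original file: how the predicate
   above pairs with the merge routine below.  Callers normally go through
   the cfghooks merge_blocks() wrapper rather than calling
   tree_merge_blocks() directly; the helper name here is made up and the
   code is disabled.  */
#if 0
static void
maybe_merge (basic_block a, basic_block b)
{
  if (tree_can_merge_blocks_p (a, b))
    merge_blocks (a, b);
}
#endif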
/* Merge block B into block A.  */

static void
tree_merge_blocks (basic_block a, basic_block b)
{
  block_stmt_iterator bsi;
  tree_stmt_iterator last;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (EDGE_SUCC (a, 0)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set bb_for_stmt to A for other statements.  */
  for (bsi = bsi_start (b); !bsi_end_p (bsi);)
    {
      if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
	bsi_remove (&bsi);
      else
	{
	  set_bb_for_stmt (bsi_stmt (bsi), a);
	  bsi_next (&bsi);
	}
    }

  /* Merge the chains.  */
  last = tsi_last (a->stmt_list);
  tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
  b->stmt_list = NULL;
}
/* Walk the function tree removing unnecessary statements.

     * Empty statement nodes are removed

     * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed

     * Unnecessary COND_EXPRs are removed

     * Some unnecessary BIND_EXPRs are removed

   Clearly more work could be done.  The trick is doing the analysis
   and removal fast enough to be a net improvement in compile times.

   Note that when we remove a control structure such as a COND_EXPR,
   BIND_EXPR, or TRY block, we will need to repeat this optimization pass
   to ensure we eliminate all the useless code.  */
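
/* A few concrete cases, for illustration only (not from the original
   sources), of what one iteration of this walk removes:

       if (cond) {} else {}          -->  (empty statement)
       try {} finally { f (); }      -->  f ();
       try { g (); } finally {}      -->  g ();
       { }  (BIND_EXPR, no locals)   -->  body pulled up one level

   Removing one construct can expose another (e.g. an IF that becomes
   empty only after its arms were emptied), which is why the driver at
   the bottom of this group repeats the walk while data.repeat is set.  */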
struct rus_data
{
  tree *last_goto;
  bool repeat;
  bool may_throw;
  bool may_branch;
  bool has_label;
};

static void remove_useless_stmts_1 (tree *, struct rus_data *);

static bool
remove_useless_stmts_warn_notreached (tree stmt)
{
  if (EXPR_HAS_LOCATION (stmt))
    {
      location_t loc = EXPR_LOCATION (stmt);
      warning ("%Hwill never be executed", &loc);
      return true;
    }

  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
	  if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
	    return true;
      }
      break;

    case COND_EXPR:
      if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
	return true;
      if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
	return true;
      if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
	return true;
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
	return true;
      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
	return true;
      break;

    case CATCH_EXPR:
      return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
    case EH_FILTER_EXPR:
      return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
    case BIND_EXPR:
      return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));

    default:
      /* Not a live container.  */
      break;
    }

  return false;
}
static void
remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
{
  tree then_clause, else_clause, cond;
  bool save_has_label, then_has_label, else_has_label;

  save_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);

  then_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);

  else_has_label = data->has_label;
  data->has_label = save_has_label | then_has_label | else_has_label;

  then_clause = COND_EXPR_THEN (*stmt_p);
  else_clause = COND_EXPR_ELSE (*stmt_p);
  cond = COND_EXPR_COND (*stmt_p);

  /* If neither arm does anything at all, we can remove the whole IF.  */
  if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
    {
      *stmt_p = build_empty_stmt ();
      data->repeat = true;
    }

  /* If there are no reachable statements in an arm, then we can
     zap the entire conditional.  */
  else if (integer_nonzerop (cond) && !else_has_label)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (else_clause);
      *stmt_p = then_clause;
      data->repeat = true;
    }
  else if (integer_zerop (cond) && !then_has_label)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (then_clause);
      *stmt_p = else_clause;
      data->repeat = true;
    }

  /* Check a couple of simple things on then/else with single stmts.  */
  else
    {
      tree then_stmt = expr_only (then_clause);
      tree else_stmt = expr_only (else_clause);

      /* Notice branches to a common destination.  */
      if (then_stmt && else_stmt
	  && TREE_CODE (then_stmt) == GOTO_EXPR
	  && TREE_CODE (else_stmt) == GOTO_EXPR
	  && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
	{
	  *stmt_p = then_stmt;
	  data->repeat = true;
	}

      /* If the THEN/ELSE clause merely assigns a value to a variable or
	 parameter which is already known to contain that value, then
	 remove the useless THEN/ELSE clause.  */
      else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
	{
	  if (else_stmt
	      && TREE_CODE (else_stmt) == MODIFY_EXPR
	      && TREE_OPERAND (else_stmt, 0) == cond
	      && integer_zerop (TREE_OPERAND (else_stmt, 1)))
	    COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
	}
      else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
	       && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
	       && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
	{
	  tree stmt = (TREE_CODE (cond) == EQ_EXPR
		       ? then_stmt : else_stmt);
	  tree *location = (TREE_CODE (cond) == EQ_EXPR
			    ? &COND_EXPR_THEN (*stmt_p)
			    : &COND_EXPR_ELSE (*stmt_p));

	  if (stmt
	      && TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
	      && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
	    *location = alloc_stmt_list ();
	}
    }

  /* Protect GOTOs in the arm of COND_EXPRs from being removed.  They
     would be re-introduced during lowering.  */
  data->last_goto = NULL;
}
static void
remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
{
  bool save_may_branch, save_may_throw;
  bool this_may_branch, this_may_throw;

  /* Collect may_branch and may_throw information for the body only.  */
  save_may_branch = data->may_branch;
  save_may_throw = data->may_throw;
  data->may_branch = false;
  data->may_throw = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);

  this_may_branch = data->may_branch;
  this_may_throw = data->may_throw;
  data->may_branch |= save_may_branch;
  data->may_throw |= save_may_throw;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);

  /* If the body is empty, then we can emit the FINALLY block without
     the enclosing TRY_FINALLY_EXPR.  */
  if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
    {
      *stmt_p = TREE_OPERAND (*stmt_p, 1);
      data->repeat = true;
    }

  /* If the handler is empty, then we can emit the TRY block without
     the enclosing TRY_FINALLY_EXPR.  */
  else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
    {
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
      data->repeat = true;
    }

  /* If the body neither throws, nor branches, then we can safely
     string the TRY and FINALLY blocks together.  */
  else if (!this_may_branch && !this_may_throw)
    {
      tree stmt = *stmt_p;
      *stmt_p = TREE_OPERAND (stmt, 0);
      append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
      data->repeat = true;
    }
}
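
/* Summary of the three rewrites above, for illustration only (not from
   the original sources):

       try {} finally { f (); }          -->  f ();
       try { g (); } finally {}          -->  g ();
       try { x = 1; } finally { f (); }  -->  x = 1; f ();
	 (the last one only when the body can neither branch nor throw)  */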
static void
remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
{
  bool save_may_throw, this_may_throw;
  tree_stmt_iterator i;
  tree stmt;

  /* Collect may_throw information for the body only.  */
  save_may_throw = data->may_throw;
  data->may_throw = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);

  this_may_throw = data->may_throw;
  data->may_throw = save_may_throw;

  /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
  if (!this_may_throw)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
      data->repeat = true;
      return;
    }

  /* Process the catch clause specially.  We may be able to tell that
     no exceptions propagate past this point.  */

  this_may_throw = true;
  i = tsi_start (TREE_OPERAND (*stmt_p, 1));
  stmt = tsi_stmt (i);
  data->last_goto = NULL;

  switch (TREE_CODE (stmt))
    {
    case CATCH_EXPR:
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  stmt = tsi_stmt (i);
	  /* If we catch all exceptions, then the body does not
	     propagate exceptions past this point.  */
	  if (CATCH_TYPES (stmt) == NULL)
	    this_may_throw = false;
	  data->last_goto = NULL;
	  remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
	}
      break;

    case EH_FILTER_EXPR:
      if (EH_FILTER_MUST_NOT_THROW (stmt))
	this_may_throw = false;
      else if (EH_FILTER_TYPES (stmt) == NULL)
	this_may_throw = false;
      remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
      break;

    default:
      /* Otherwise this is a cleanup.  */
      remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);

      /* If the cleanup is empty, then we can emit the TRY block without
	 the enclosing TRY_CATCH_EXPR.  */
      if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
	{
	  *stmt_p = TREE_OPERAND (*stmt_p, 0);
	  data->repeat = true;
	}
      break;
    }
  data->may_throw |= this_may_throw;
}
static void
remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
{
  tree block;

  /* First remove anything underneath the BIND_EXPR.  */
  remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);

  /* If the BIND_EXPR has no variables, then we can pull everything
     up one level and remove the BIND_EXPR, unless this is the toplevel
     BIND_EXPR for the current function or an inlined function.

     When this situation occurs we will want to apply this
     optimization again.  */
  block = BIND_EXPR_BLOCK (*stmt_p);
  if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
      && *stmt_p != DECL_SAVED_TREE (current_function_decl)
      && (! block
	  || ! BLOCK_ABSTRACT_ORIGIN (block)
	  || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
	      != FUNCTION_DECL)))
    {
      *stmt_p = BIND_EXPR_BODY (*stmt_p);
      data->repeat = true;
    }
}
static void
remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
{
  tree dest = GOTO_DESTINATION (*stmt_p);

  data->may_branch = true;
  data->last_goto = NULL;

  /* Record the last goto expr, so that we can delete it if unnecessary.  */
  if (TREE_CODE (dest) == LABEL_DECL)
    data->last_goto = stmt_p;
}
static void
remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
{
  tree label = LABEL_EXPR_LABEL (*stmt_p);

  data->has_label = true;

  /* We do want to jump across non-local label receiver code.  */
  if (DECL_NONLOCAL (label))
    data->last_goto = NULL;

  else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
    {
      *data->last_goto = build_empty_stmt ();
      data->repeat = true;
    }

  /* ??? Add something here to delete unused labels.  */
}
/* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
   decl.  This allows us to eliminate redundant or useless
   calls to "const" functions.

   The gimplifier already does the same operation, but we may notice functions
   being const and pure once their calls have been gimplified, so we need
   to update the flag.  */

static void
update_call_expr_flags (tree call)
{
  tree decl = get_callee_fndecl (call);
  if (!decl)
    return;
  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
    TREE_SIDE_EFFECTS (call) = 0;
  if (TREE_NOTHROW (decl))
    TREE_NOTHROW (call) = 1;
}
/* T is CALL_EXPR.  Set current_function_calls_* flags.  */

void
notice_special_calls (tree t)
{
  int flags = call_expr_flags (t);

  if (flags & ECF_MAY_BE_ALLOCA)
    current_function_calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    current_function_calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  current_function_calls_alloca = false;
  current_function_calls_setjmp = false;
}
static void
remove_useless_stmts_1 (tree *tp, struct rus_data *data)
{
  tree t = *tp, op;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      remove_useless_stmts_cond (tp, data);
      break;

    case TRY_FINALLY_EXPR:
      remove_useless_stmts_tf (tp, data);
      break;

    case TRY_CATCH_EXPR:
      remove_useless_stmts_tc (tp, data);
      break;

    case BIND_EXPR:
      remove_useless_stmts_bind (tp, data);
      break;

    case GOTO_EXPR:
      remove_useless_stmts_goto (tp, data);
      break;

    case LABEL_EXPR:
      remove_useless_stmts_label (tp, data);
      break;

    case RETURN_EXPR:
      data->last_goto = NULL;
      data->may_branch = true;
      break;

    case CALL_EXPR:
      data->last_goto = NULL;
      notice_special_calls (t);
      update_call_expr_flags (t);
      if (tree_could_throw_p (t))
	data->may_throw = true;
      break;

    case MODIFY_EXPR:
      data->last_goto = NULL;
      op = get_call_expr_in (t);
      if (op)
	{
	  update_call_expr_flags (op);
	  notice_special_calls (op);
	}
      if (tree_could_throw_p (t))
	data->may_throw = true;
      break;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i = tsi_start (t);
	while (!tsi_end_p (i))
	  {
	    t = tsi_stmt (i);
	    if (IS_EMPTY_STMT (t))
	      {
		tsi_delink (&i);
		continue;
	      }

	    remove_useless_stmts_1 (tsi_stmt_ptr (i), data);

	    t = tsi_stmt (i);
	    if (TREE_CODE (t) == STATEMENT_LIST)
	      {
		tsi_link_before (&i, t, TSI_SAME_STMT);
		tsi_delink (&i);
	      }
	    else
	      tsi_next (&i);
	  }
      }
      break;

    case ASM_EXPR:
      data->last_goto = NULL;
      break;

    default:
      data->last_goto = NULL;
      break;
    }
}
static void
remove_useless_stmts (void)
{
  struct rus_data data;

  clear_special_calls ();

  do
    {
      memset (&data, 0, sizeof (data));
      remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
    }
  while (data.repeat);
}


struct tree_opt_pass pass_remove_useless_stmts =
{
  "useless",				/* name */
  NULL,					/* gate */
  remove_useless_stmts,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_gimple_any,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
/* Remove obviously useless statements in basic block BB.  */

static void
cfg_remove_useless_stmts_bb (basic_block bb)
{
  block_stmt_iterator bsi;
  tree stmt = NULL_TREE;
  tree cond, var = NULL_TREE, val = NULL_TREE;
  struct var_ann_d *ann;

  /* Check whether we come here from a condition, and if so, get the
     condition.  */
  if (EDGE_COUNT (bb->preds) != 1
      || !(EDGE_PRED (bb, 0)->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
    return;

  cond = COND_EXPR_COND (last_stmt (EDGE_PRED (bb, 0)->src));

  if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
    {
      var = cond;
      val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
	     ? boolean_false_node : boolean_true_node);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR
	   && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
	       || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL))
    {
      var = TREE_OPERAND (cond, 0);
      val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
	     ? boolean_true_node : boolean_false_node);
    }
  else
    {
      if (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE)
	cond = invert_truthvalue (cond);
      if (TREE_CODE (cond) == EQ_EXPR
	  && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
	      || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
	  && (TREE_CODE (TREE_OPERAND (cond, 1)) == VAR_DECL
	      || TREE_CODE (TREE_OPERAND (cond, 1)) == PARM_DECL
	      || TREE_CONSTANT (TREE_OPERAND (cond, 1))))
	{
	  var = TREE_OPERAND (cond, 0);
	  val = TREE_OPERAND (cond, 1);
	}
      else
	return;
    }

  /* Only work for normal local variables.  */
  ann = var_ann (var);
  if (!ann
      || ann->may_aliases
      || TREE_ADDRESSABLE (var))
    return;

  if (! TREE_CONSTANT (val))
    {
      ann = var_ann (val);
      if (!ann
	  || ann->may_aliases
	  || TREE_ADDRESSABLE (val))
	return;
    }

  /* Ignore floating point variables, since comparison behaves weird for
     them.  */
  if (FLOAT_TYPE_P (TREE_TYPE (var)))
    return;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
    {
      stmt = bsi_stmt (bsi);

      /* If the THEN/ELSE clause merely assigns a value to a variable/parameter
	 which is already known to contain that value, then remove the useless
	 THEN/ELSE clause.  */
      if (TREE_CODE (stmt) == MODIFY_EXPR
	  && TREE_OPERAND (stmt, 0) == var
	  && operand_equal_p (val, TREE_OPERAND (stmt, 1), 0))
	{
	  bsi_remove (&bsi);
	  continue;
	}

      /* Invalidate the var if we encounter something that could modify it.
	 Likewise for the value it was previously set to.  Note that we only
	 consider values that are either a VAR_DECL or PARM_DECL so we
	 can test for conflict very simply.  */
      if (TREE_CODE (stmt) == ASM_EXPR
	  || (TREE_CODE (stmt) == MODIFY_EXPR
	      && (TREE_OPERAND (stmt, 0) == var
		  || TREE_OPERAND (stmt, 0) == val)))
	return;

      bsi_next (&bsi);
    }
}
/* A CFG-aware version of remove_useless_stmts.  */

static void
cfg_remove_useless_stmts (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  FOR_EACH_BB (bb)
    {
      cfg_remove_useless_stmts_bb (bb);
    }
}
/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  tree phi;

  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  phi = phi_nodes (bb);
  while (phi)
    {
      tree next = PHI_CHAIN (phi);
      remove_phi_node (phi, NULL_TREE, bb);
      phi = next;
    }

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    ssa_remove_edge (EDGE_SUCC (bb, 0));
}
/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  block_stmt_iterator i;
  source_locus loc = 0;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (bb, dump_file, 0);
	  fprintf (dump_file, "\n");
	}
    }

  /* Remove all the instructions in the block.  */
  for (i = bsi_start (bb); !bsi_end_p (i);)
    {
      tree stmt = bsi_stmt (i);
      if (TREE_CODE (stmt) == LABEL_EXPR
	  && FORCED_LABEL (LABEL_EXPR_LABEL (stmt)))
	{
	  basic_block new_bb = bb->prev_bb;
	  block_stmt_iterator new_bsi = bsi_after_labels (new_bb);

	  bsi_remove (&i);
	  bsi_insert_after (&new_bsi, stmt, BSI_NEW_STMT);
	}
      else
	{
	  release_defs (stmt);

	  set_bb_for_stmt (stmt, NULL);
	  bsi_remove (&i);
	}

      /* Don't warn for removed gotos.  Gotos are often removed due to
	 jump threading, thus resulting in bogus warnings.  Not great,
	 since this way we lose warnings for gotos in the original
	 program that are indeed unreachable.  */
      if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
#ifdef USE_MAPPED_LOCATION
	loc = EXPR_LOCATION (stmt);
#else
	loc = EXPR_LOCUS (stmt);
#endif
    }

  /* If requested, give a warning that the first statement in the
     block is unreachable.  We walk statements backwards in the
     loop above, so the last statement we process is the first statement
     in the block.  */
  if (warn_notreached && loc)
#ifdef USE_MAPPED_LOCATION
    warning ("%Hwill never be executed", &loc);
#else
    warning ("%Hwill never be executed", loc);
#endif

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
}
/* Try to remove superfluous control structures.  */

static bool
cleanup_control_flow (void)
{
  basic_block bb;
  block_stmt_iterator bsi;
  bool retval = false;
  tree stmt;

  FOR_EACH_BB (bb)
    {
      bsi = bsi_last (bb);

      if (bsi_end_p (bsi))
	continue;

      stmt = bsi_stmt (bsi);
      if (TREE_CODE (stmt) == COND_EXPR
	  || TREE_CODE (stmt) == SWITCH_EXPR)
	retval |= cleanup_control_expr_graph (bb, bsi);
    }
  return retval;
}
/* Disconnect an unreachable block in the control expression starting
   at block BB.  */

static bool
cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
{
  edge taken_edge;
  bool retval = false;
  tree expr = bsi_stmt (bsi), val;

  if (EDGE_COUNT (bb->succs) > 1)
    {
      edge e;
      edge_iterator ei;

      switch (TREE_CODE (expr))
	{
	case COND_EXPR:
	  val = COND_EXPR_COND (expr);
	  break;

	case SWITCH_EXPR:
	  val = SWITCH_COND (expr);
	  if (TREE_CODE (val) != INTEGER_CST)
	    return false;
	  break;

	default:
	  gcc_unreachable ();
	}

      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
	return false;

      /* Remove all the edges except the one that is always executed.  */
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e != taken_edge)
	    {
	      taken_edge->probability += e->probability;
	      taken_edge->count += e->count;
	      ssa_remove_edge (e);
	      retval = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (taken_edge->probability > REG_BR_PROB_BASE)
	taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = EDGE_SUCC (bb, 0);

  bsi_remove (&bsi);
  taken_edge->flags = EDGE_FALLTHRU;

  /* We removed some paths from the cfg.  */
  free_dominance_info (CDI_DOMINATORS);

  return retval;
}
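
/* Example for illustration (not from the original sources): if the
   condition of a COND_EXPR has been folded to the constant 0, only the
   EDGE_FALSE_VALUE successor can be taken.  The loop above removes the
   true edge (accumulating its probability and count into the surviving
   edge), the control statement itself is deleted, and the remaining
   edge is turned into a plain fallthru.  */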
/* Given a control block BB and a predicate VAL, return the edge that
   will be taken out of the block.  If VAL does not match a unique
   edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  tree stmt;

  stmt = last_stmt (bb);

  gcc_assert (is_ctrl_stmt (stmt));

  /* If VAL is a predicate of the form N RELOP N, where N is an
     SSA_NAME, we can usually determine its truth value.  */
  if (val && COMPARISON_CLASS_P (val))
    val = fold (val);

  /* If VAL is not a constant, we can't determine which edge might
     be taken.  */
  if (val == NULL || !really_constant_p (val))
    return NULL;

  if (TREE_CODE (stmt) == COND_EXPR)
    return find_taken_edge_cond_expr (bb, val);

  if (TREE_CODE (stmt) == SWITCH_EXPR)
    return find_taken_edge_switch_expr (bb, val);

  return EDGE_SUCC (bb, 0);
}
/* Given a constant value VAL and the entry block BB to a COND_EXPR
   statement, determine which of the two edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_cond_expr (basic_block bb, tree val)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

  /* If both edges of the branch lead to the same basic block, it doesn't
     matter which edge is taken.  */
  if (true_edge->dest == false_edge->dest)
    return true_edge;

  /* Otherwise, try to determine which branch of the if() will be taken.
     If VAL is a constant but it can't be reduced to a 0 or a 1, then
     we don't really know which edge will be taken at runtime.  This
     may happen when comparing addresses (e.g., if (&var1 == 4)).  */
  if (integer_nonzerop (val))
    return true_edge;
  else if (integer_zerop (val))
    return false_edge;
  else
    return NULL;
}
/* Given a constant value VAL and the entry block BB to a SWITCH_EXPR
   statement, determine which edge will be taken out of the block.  Return
   NULL if any edge may be taken.  */

static edge
find_taken_edge_switch_expr (basic_block bb, tree val)
{
  tree switch_expr, taken_case;
  basic_block dest_bb;
  edge e;

  if (TREE_CODE (val) != INTEGER_CST)
    return NULL;

  switch_expr = last_stmt (bb);
  taken_case = find_case_label_for_value (switch_expr, val);
  dest_bb = label_to_block (CASE_LABEL (taken_case));

  e = find_edge (bb, dest_bb);
  return e;
}
/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (tree switch_expr, tree val)
{
  tree vec = SWITCH_LABELS (switch_expr);
  size_t low, high, n = TREE_VEC_LENGTH (vec);
  tree default_case = TREE_VEC_ELT (vec, n - 1);

  for (low = -1, high = n - 1; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = TREE_VEC_ELT (vec, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
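
/* Worked example (illustration only, not from the original sources):
   with the sorted label vector

       index:  0          1        2           3
       case:   1 ... 3    7        10 ... 20   default

   and VAL == 12, the search narrows (low, high) from (-1, 3) to (1, 3)
   and then to (2, 3); the range check on index 2 sees 10 <= 12 <= 20
   and returns that CASE_LABEL_EXPR.  VAL == 4 falls between entries, so
   the loop terminates and the default case is returned.  */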
/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
   those alternatives are equal in each of the PHI nodes, then return
   true, else return false.  */

static bool
phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
  tree phi, val1, val2;
  int n1, n2;

  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
    {
      n1 = phi_arg_from_edge (phi, e1);
      n2 = phi_arg_from_edge (phi, e2);

      gcc_assert (n1 >= 0);
      gcc_assert (n2 >= 0);

      val1 = PHI_ARG_DEF (phi, n1);
      val2 = PHI_ARG_DEF (phi, n2);

      if (!operand_equal_p (val1, val2, 0))
	return false;
    }

  return true;
}
/*---------------------------------------------------------------------------
---------------------------------------------------------------------------*/

/* Dump tree-specific information of block BB to file OUTF.  */

void
tree_dump_bb (basic_block bb, FILE *outf, int indent)
{
  dump_generic_bb (outf, bb, indent, TDF_VOPS);
}
/* Dump a basic block on stderr.  */

void
debug_tree_bb (basic_block bb)
{
  dump_bb (bb, stderr, 0);
}


/* Dump basic block with index N on stderr.  */

basic_block
debug_tree_bb_n (int n)
{
  debug_tree_bb (BASIC_BLOCK (n));
  return BASIC_BLOCK (n);
}
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in tree.h).  */

void
debug_tree_cfg (int flags)
{
  dump_tree_cfg (stderr, flags);
}


/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  */

void
dump_tree_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      const char *funcname
	= lang_hooks.decl_printable_name (current_function_decl, 2);

      fprintf (file, ";; Function %s\n\n", funcname);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks, n_edges, last_basic_block);

      brief_dump_cfg (file);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
/* Dump CFG statistics on FILE.  */

void
dump_cfg_stats (FILE *file)
{
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  int n_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
	   SCALE (size), LABEL (size));

  n_edges = 0;
  FOR_EACH_BB (bb)
    n_edges += EDGE_COUNT (bb->succs);
  size = n_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_1, "Edges", n_edges, SCALE (size), LABEL (size));

  size = n_basic_blocks * sizeof (struct bb_ann_d);
  total += size;
  fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks,
	   SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
/* Dump the flowgraph to a .vcg FILE.  */

static void
tree_cfg2vcg (FILE *file)
{
  edge e;
  edge_iterator ei;
  basic_block bb;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Write the file header.  */
  fprintf (file, "graph: { title: \"%s\"\n", funcname);
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");

  /* Write blocks and edges.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    {
      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
	       e->dest->index);

      if (e->flags & EDGE_FAKE)
	fprintf (file, " linestyle: dotted priority: 10");
      else
	fprintf (file, " linestyle: solid priority: 100");

      fprintf (file, " }\n");
    }

  FOR_EACH_BB (bb)
    {
      enum tree_code head_code, end_code;
      const char *head_name, *end_name;
      int head_line = 0;
      int end_line = 0;
      tree first = first_stmt (bb);
      tree last = last_stmt (bb);

      if (first)
	{
	  head_code = TREE_CODE (first);
	  head_name = tree_code_name[head_code];
	  head_line = get_lineno (first);
	}
      else
	head_name = "no-statement";

      if (last)
	{
	  end_code = TREE_CODE (last);
	  end_name = tree_code_name[end_code];
	  end_line = get_lineno (last);
	}
      else
	end_name = "no-statement";

      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
	       bb->index, bb->index, head_name, head_line, end_name,
	       end_line);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->dest == EXIT_BLOCK_PTR)
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
	  else
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);

	  if (e->flags & EDGE_FAKE)
	    fprintf (file, " priority: 10 linestyle: dotted");
	  else
	    fprintf (file, " priority: 100 linestyle: solid");

	  fprintf (file, " }\n");
	}

      if (bb->next_bb != EXIT_BLOCK_PTR)
	fputc ('\n', file);
    }

  fputs ("}\n\n", file);
}
/*---------------------------------------------------------------------------
			     Miscellaneous helpers
---------------------------------------------------------------------------*/

/* Return true if T represents a stmt that always transfers control.  */

bool
is_ctrl_stmt (tree t)
{
  return (TREE_CODE (t) == COND_EXPR
	  || TREE_CODE (t) == SWITCH_EXPR
	  || TREE_CODE (t) == GOTO_EXPR
	  || TREE_CODE (t) == RETURN_EXPR
	  || TREE_CODE (t) == RESX_EXPR);
}
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (tree t)
{
  tree call;

  call = get_call_expr_in (t);
  if (call)
    {
      /* A non-pure/const CALL_EXPR alters flow control if the current
	 function has nonlocal labels.  */
      if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
	return true;

      /* A CALL_EXPR also alters control flow if it does not return.  */
      if (call_expr_flags (call) & (ECF_NORETURN | ECF_LONGJMP))
	return true;
    }

  /* If a statement can throw, it alters control flow.  */
  return tree_can_throw_internal (t);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (tree t)
{
  return (TREE_CODE (t) == GOTO_EXPR
	  && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
}


/* Checks whether EXPR is a simple local goto.  */

bool
simple_goto_p (tree expr)
{
  return (TREE_CODE (expr) == GOTO_EXPR
	  && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
}
/* Return true if T should start a new basic block.  PREV_T is the
   statement preceding T.  It is used when T is a label or a case label.
   Labels should only start a new basic block if their previous statement
   wasn't a label.  Otherwise, sequence of labels would generate
   unnecessary basic blocks that only contain a single label.  */

static inline bool
stmt_starts_bb_p (tree t, tree prev_t)
{
  enum tree_code code;

  if (!t)
    return false;

  /* LABEL_EXPRs start a new basic block only if the preceding
     statement wasn't a label of the same type.  This prevents the
     creation of consecutive blocks that have nothing but a single
     label.  */
  code = TREE_CODE (t);
  if (code == LABEL_EXPR)
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (code == LABEL_EXPR
	  && (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
	      || FORCED_LABEL (LABEL_EXPR_LABEL (t))))
	return true;

      if (prev_t && TREE_CODE (prev_t) == code)
	{
	  if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
	    return true;

	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }

  return false;
}
2436 /* Return true if T should end a basic block. */
2439 stmt_ends_bb_p (tree t
)
2441 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
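
/* Example (illustrative only): in a GIMPLE sequence such as

       L1:
       L2:
         x = x + 1;

   stmt_starts_bb_p returns false for the second LABEL_EXPR because the
   preceding statement is a label of the same kind, so both labels land in
   one basic block and cfg_stats.num_merged_labels is bumped.  A nonlocal
   or FORCED_LABEL in the same position would still start a new block.  */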
/* Add gotos that used to be represented implicitly in the CFG.  */

void
disband_implicit_edges (void)
{
  basic_block bb;
  block_stmt_iterator last;
  edge e;
  edge_iterator ei;
  tree stmt, label;

  FOR_EACH_BB (bb)
    {
      last = bsi_last (bb);
      stmt = last_stmt (bb);

      if (stmt && TREE_CODE (stmt) == COND_EXPR)
        {
          /* Remove superfluous gotos from COND_EXPR branches.  Moved
             from cfg_remove_useless_stmts here since it violates the
             invariants for tree--cfg correspondence and thus fits better
             here where we do it anyway.  */
          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              if (e->dest != bb->next_bb)
                continue;

              if (e->flags & EDGE_TRUE_VALUE)
                COND_EXPR_THEN (stmt) = build_empty_stmt ();
              else if (e->flags & EDGE_FALSE_VALUE)
                COND_EXPR_ELSE (stmt) = build_empty_stmt ();
              e->flags |= EDGE_FALLTHRU;
            }

          continue;
        }

      if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
        {
          /* Remove the RETURN_EXPR if we may fall though to the exit
             instead.  */
          gcc_assert (EDGE_COUNT (bb->succs) == 1);
          gcc_assert (EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR);

          if (bb->next_bb == EXIT_BLOCK_PTR
              && !TREE_OPERAND (stmt, 0))
            {
              bsi_remove (&last);
              EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
            }
          continue;
        }

      /* There can be no fallthru edge if the last statement is a control
         one.  */
      if (stmt && is_ctrl_stmt (stmt))
        continue;

      /* Find a fallthru edge and emit the goto if necessary.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          break;

      if (!e || e->dest == bb->next_bb)
        continue;

      gcc_assert (e->dest != EXIT_BLOCK_PTR);
      label = tree_block_label (e->dest);

      stmt = build1 (GOTO_EXPR, void_type_node, label);
#ifdef USE_MAPPED_LOCATION
      SET_EXPR_LOCATION (stmt, e->goto_locus);
#else
      SET_EXPR_LOCUS (stmt, e->goto_locus);
#endif
      bsi_insert_after (&last, stmt, BSI_NEW_STMT);
      e->flags &= ~EDGE_FALLTHRU;
    }
}
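
/* Example (illustrative only): if CFG cleanups leave a block whose single
   successor is no longer the next block in the chain,

       <bb 3>: x = 1;          (fallthru edge to <bb 7>)

   disband_implicit_edges materializes the jump again as

       <bb 3>: x = 1; goto <L7>;

   using the label returned by tree_block_label for the destination.  */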
/* Remove block annotations and other datastructures.  */

void
delete_tree_cfg_annotations (void)
{
  if (n_basic_blocks > 0)
    free_blocks_annotations ();

  label_to_block_map = NULL;
}


/* Return the first statement in basic block BB.  */

tree
first_stmt (basic_block bb)
{
  block_stmt_iterator i = bsi_start (bb);
  return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
}


/* Return the last statement in basic block BB.  */

tree
last_stmt (basic_block bb)
{
  block_stmt_iterator b = bsi_last (bb);
  return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
}


/* Return a pointer to the last statement in block BB.  */

tree *
last_stmt_ptr (basic_block bb)
{
  block_stmt_iterator last = bsi_last (bb);
  return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
}


/* Return the last statement of an otherwise empty block.  Return NULL
   if the block is totally empty, or if it contains more than one
   statement.  */

tree
last_and_only_stmt (basic_block bb)
{
  block_stmt_iterator i = bsi_last (bb);
  tree last, prev;

  if (bsi_end_p (i))
    return NULL_TREE;

  last = bsi_stmt (i);
  bsi_prev (&i);
  if (bsi_end_p (i))
    return last;

  /* Empty statements should no longer appear in the instruction stream.
     Everything that might have appeared before should be deleted by
     remove_useless_stmts, and the optimizers should just bsi_remove
     instead of smashing with build_empty_stmt.

     Thus the only thing that should appear here in a block containing
     one executable statement is a label.  */
  prev = bsi_stmt (i);
  if (TREE_CODE (prev) == LABEL_EXPR)
    return last;
  else
    return NULL_TREE;
}
/* Mark BB as the basic block holding statement T.  */

void
set_bb_for_stmt (tree t, basic_block bb)
{
  if (TREE_CODE (t) == PHI_NODE)
    PHI_BB (t) = bb;
  else if (TREE_CODE (t) == STATEMENT_LIST)
    {
      tree_stmt_iterator i;
      for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
        set_bb_for_stmt (tsi_stmt (i), bb);
    }
  else
    {
      stmt_ann_t ann = get_stmt_ann (t);
      ann->bb = bb;

      /* If the statement is a label, add the label to block-to-labels map
         so that we can speed up edge creation for GOTO_EXPRs.  */
      if (TREE_CODE (t) == LABEL_EXPR)
        {
          int uid;

          t = LABEL_EXPR_LABEL (t);
          uid = LABEL_DECL_UID (t);
          if (uid == -1)
            {
              LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
              if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
                VARRAY_GROW (label_to_block_map, 3 * uid / 2);
            }
          else
            /* We're moving an existing label.  Make sure that we've
               removed it from the old block.  */
            gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));

          VARRAY_BB (label_to_block_map, uid) = bb;
        }
    }
}
/* Finds iterator for STMT.  */

extern block_stmt_iterator
bsi_for_stmt (tree stmt)
{
  block_stmt_iterator bsi;

  for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
    if (bsi_stmt (bsi) == stmt)
      return bsi;

  gcc_unreachable ();
}


/* Insert statement (or statement list) T before the statement
   pointed-to by iterator I.  M specifies how to update iterator I
   after insertion (see enum bsi_iterator_update).  */

void
bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  set_bb_for_stmt (t, i->bb);
  tsi_link_before (&i->tsi, t, m);
}


/* Insert statement (or statement list) T after the statement
   pointed-to by iterator I.  M specifies how to update iterator I
   after insertion (see enum bsi_iterator_update).  */

void
bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  set_bb_for_stmt (t, i->bb);
  tsi_link_after (&i->tsi, t, m);
}


/* Remove the statement pointed to by iterator I.  The iterator is updated
   to the next statement.  */

void
bsi_remove (block_stmt_iterator *i)
{
  tree t = bsi_stmt (*i);
  set_bb_for_stmt (t, NULL);
  tsi_delink (&i->tsi);
}
/* Move the statement at FROM so it comes right after the statement at TO.  */

void
bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
{
  tree stmt = bsi_stmt (*from);
  bsi_remove (from);
  bsi_insert_after (to, stmt, BSI_SAME_STMT);
}


/* Move the statement at FROM so it comes right before the statement at TO.  */

void
bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
{
  tree stmt = bsi_stmt (*from);
  bsi_remove (from);
  bsi_insert_before (to, stmt, BSI_SAME_STMT);
}


/* Move the statement at FROM to the end of basic block BB.  */

void
bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
{
  block_stmt_iterator last = bsi_last (bb);

  /* Have to check bsi_end_p because it could be an empty block.  */
  if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
    bsi_move_before (from, &last);
  else
    bsi_move_after (from, &last);
}
/* Replace the contents of the statement pointed to by iterator BSI
   with STMT.  If PRESERVE_EH_INFO is true, the exception handling
   information of the original statement is preserved.  */

void
bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
{
  int eh_region;
  tree orig_stmt = bsi_stmt (*bsi);

  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
  set_bb_for_stmt (stmt, bsi->bb);

  /* Preserve EH region information from the original statement, if
     requested by the caller.  */
  if (preserve_eh_info)
    {
      eh_region = lookup_stmt_eh_region (orig_stmt);
      if (eh_region >= 0)
        add_stmt_to_eh_region (stmt, eh_region);
    }

  *bsi_stmt_ptr (*bsi) = stmt;
}
/* Insert the statement pointed-to by BSI into edge E.  Every attempt
   is made to place the statement in an existing basic block, but
   sometimes that isn't possible.  When it isn't possible, the edge is
   split and the statement is added to the new block.

   In all cases, the returned *BSI points to the correct location.  The
   return value is true if insertion should be done after the location,
   or false if it should be done before the location.  If new basic block
   has to be created, it is stored in *NEW_BB.  */

static bool
tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
                           basic_block *new_bb)
{
  basic_block dest, src;
  tree tmp;

  dest = e->dest;
 restart:

  /* If the destination has one predecessor which has no PHI nodes,
     insert there.  Except for the exit block.

     The requirement for no PHI nodes could be relaxed.  Basically we
     would have to examine the PHIs to prove that none of them used
     the value set by the statement we want to insert on E.  That
     hardly seems worth the effort.  */
  if (EDGE_COUNT (dest->preds) == 1
      && ! phi_nodes (dest)
      && dest != EXIT_BLOCK_PTR)
    {
      *bsi = bsi_start (dest);
      if (bsi_end_p (*bsi))
        return true;

      /* Make sure we insert after any leading labels.  */
      tmp = bsi_stmt (*bsi);
      while (TREE_CODE (tmp) == LABEL_EXPR)
        {
          bsi_next (bsi);
          if (bsi_end_p (*bsi))
            break;
          tmp = bsi_stmt (*bsi);
        }

      if (bsi_end_p (*bsi))
        {
          *bsi = bsi_last (dest);
          return true;
        }
      else
        return false;
    }

  /* If the source has one successor, the edge is not abnormal and
     the last statement does not end a basic block, insert there.
     Except for the entry block.  */
  src = e->src;
  if ((e->flags & EDGE_ABNORMAL) == 0
      && EDGE_COUNT (src->succs) == 1
      && src != ENTRY_BLOCK_PTR)
    {
      *bsi = bsi_last (src);
      if (bsi_end_p (*bsi))
        return true;

      tmp = bsi_stmt (*bsi);
      if (!stmt_ends_bb_p (tmp))
        return true;

      /* Insert code just before returning the value.  We may need to decompose
         the return in the case it contains non-trivial operand.  */
      if (TREE_CODE (tmp) == RETURN_EXPR)
        {
          tree op = TREE_OPERAND (tmp, 0);
          if (!is_gimple_val (op))
            {
              gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
              bsi_insert_before (bsi, op, BSI_NEW_STMT);
              TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
            }
          bsi_prev (bsi);
          return true;
        }
    }

  /* Otherwise, create a new basic block, and split this edge.  */
  dest = split_edge (e);
  if (new_bb)
    *new_bb = dest;
  e = EDGE_PRED (dest, 0);
  goto restart;
}
/* This routine will commit all pending edge insertions, creating any new
   basic blocks which are necessary.

   If specified, NEW_BLOCKS returns a count of the number of new basic
   blocks which were created.  */

void
bsi_commit_edge_inserts (int *new_blocks)
{
  basic_block bb;
  edge e;
  edge_iterator ei;
  int blocks;

  blocks = n_basic_blocks;

  bsi_commit_edge_inserts_1 (EDGE_SUCC (ENTRY_BLOCK_PTR, 0));

  FOR_EACH_BB (bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      bsi_commit_edge_inserts_1 (e);

  if (new_blocks)
    *new_blocks = n_basic_blocks - blocks;
}


/* Commit insertions pending at edge E.  */

static void
bsi_commit_edge_inserts_1 (edge e)
{
  if (PENDING_STMT (e))
    {
      block_stmt_iterator bsi;
      tree stmt = PENDING_STMT (e);

      PENDING_STMT (e) = NULL_TREE;

      if (tree_find_edge_insert_loc (e, &bsi, NULL))
        bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
      else
        bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
    }
}
/* Add STMT to the pending list of edge E.  No actual insertion is
   made until a call to bsi_commit_edge_inserts () is made.  */

void
bsi_insert_on_edge (edge e, tree stmt)
{
  append_to_statement_list (stmt, &PENDING_STMT (e));
}
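
/* Typical usage (illustrative only): passes that want to add code on an
   edge without worrying about block placement queue it and commit later,

       bsi_insert_on_edge (e, stmt);
       ...
       bsi_commit_edge_inserts ((int *) NULL);

   exactly as tree_flow_call_edges_add does further down in this file;
   bsi_insert_on_edge_immediate is the eager variant and hands back the
   new basic block if the edge had to be split.  */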
/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts.  If new block has to
   be created, it is returned.  */

basic_block
bsi_insert_on_edge_immediate (edge e, tree stmt)
{
  block_stmt_iterator bsi;
  basic_block new_bb = NULL;

  gcc_assert (!PENDING_STMT (e));

  if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
    bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
  else
    bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);

  return new_bb;
}
/*---------------------------------------------------------------------------
             Tree specific functions for CFG manipulation
---------------------------------------------------------------------------*/

/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
tree_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest, src;
  edge new_edge, e;
  edge_iterator ei;
  tree phi;
  int i, num_elem;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  /* Place the new block in the block list.  Try to keep the new block
     near its "logical" location.  This is of most help to humans looking
     at debugging dumps.  */
  FOR_EACH_EDGE (e, ei, dest->preds)
    if (e->src->next_bb == dest)
      break;
  if (!e)
    after_bb = dest->prev_bb;
  else
    after_bb = edge_in->src;

  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Find all the PHI arguments on the original edge, and change them to
     the new edge.  Do it before redirection, so that the argument does not
     get removed.  */
  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
    {
      num_elem = PHI_NUM_ARGS (phi);
      for (i = 0; i < num_elem; i++)
        if (PHI_ARG_EDGE (phi, i) == edge_in)
          {
            PHI_ARG_EDGE (phi, i) = new_edge;
            break;
          }
    }

  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (!PENDING_STMT (edge_in));

  return new_bb;
}
/* Return true when BB has label LABEL in it.  */

static bool
has_label_p (basic_block bb, tree label)
{
  block_stmt_iterator bsi;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);

      if (TREE_CODE (stmt) != LABEL_EXPR)
        return false;
      if (LABEL_EXPR_LABEL (stmt) == label)
        return true;
    }
  return false;
}
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Check operand N for being valid GIMPLE and give error MSG if not.
     We check for constants explicitly since they are not considered
     gimple invariants if they overflowed.  */
#define CHECK_OP(N, MSG) \
  do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N))		\
	   && !is_gimple_val (TREE_OPERAND (t, N)))		\
	 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
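
/* Example (illustrative only): CHECK_OP (1, "Invalid array index.") expands
   roughly to

       do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
                && !is_gimple_val (TREE_OPERAND (t, 1)))
              { error ("Invalid array index."); return TREE_OPERAND (t, 1); }
          } while (0)

   so verify_expr both reports the problem and returns the offending
   operand, which the callers then dump with debug_generic_stmt.  */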
  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
        {
          error ("SSA name in freelist but still referenced");
          return *tp;
        }
      break;

    case MODIFY_EXPR:
      x = TREE_OPERAND (t, 0);
      if (TREE_CODE (x) == BIT_FIELD_REF
          && is_gimple_reg (TREE_OPERAND (x, 0)))
        {
          error ("GIMPLE register modified with BIT_FIELD_REF");
          return t;
        }
      break;

    case ADDR_EXPR:
      /* Skip any references (they will be checked when we recurse down the
         tree) and ensure that any variable used as a prefix is marked
         addressable.  */
      for (x = TREE_OPERAND (t, 0);
           (handled_component_p (x)
            || TREE_CODE (x) == REALPART_EXPR
            || TREE_CODE (x) == IMAGPART_EXPR);
           x = TREE_OPERAND (x, 0))
        ;

      if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
        break;
      if (!TREE_ADDRESSABLE (x))
        {
          error ("address taken, but ADDRESSABLE bit not set");
          return x;
        }
      break;

    case COND_EXPR:
      x = TREE_OPERAND (t, 0);
      if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
        {
          error ("non-boolean used in condition");
          return x;
        }
      break;

    case FIX_TRUNC_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      CHECK_OP (0, "Invalid operand to unary operator");
      break;

    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
         that determine where to reference is either a constant or a variable,
         verify that the base is valid, and then show we've already checked
         the subtrees.  */
      while (TREE_CODE (t) == REALPART_EXPR || TREE_CODE (t) == IMAGPART_EXPR
             || handled_component_p (t))
        {
          if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
            CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
          else if (TREE_CODE (t) == ARRAY_REF
                   || TREE_CODE (t) == ARRAY_RANGE_REF)
            {
              CHECK_OP (1, "Invalid array index.");
              if (TREE_OPERAND (t, 2))
                CHECK_OP (2, "Invalid array lower bound.");
              if (TREE_OPERAND (t, 3))
                CHECK_OP (3, "Invalid array stride.");
            }
          else if (TREE_CODE (t) == BIT_FIELD_REF)
            {
              CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
              CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
            }

          t = TREE_OPERAND (t, 0);
        }

      if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
        {
          error ("Invalid reference prefix.");
          return t;
        }
      break;

    case UNORDERED_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
      CHECK_OP (0, "Invalid operand to binary operator");
      CHECK_OP (1, "Invalid operand to binary operator");
      break;

    default:
      break;
    }

  return NULL;

#undef CHECK_OP
}
/* Verify STMT, return true if STMT is not in GIMPLE form.
   TODO: Implement type checking.  */

static bool
verify_stmt (tree stmt, bool last_in_block)
{
  tree addr;

  if (!is_gimple_stmt (stmt))
    {
      error ("Is not a valid GIMPLE statement.");
      goto fail;
    }

  addr = walk_tree (&stmt, verify_expr, NULL, NULL);
  if (addr)
    {
      debug_generic_stmt (addr);
      return true;
    }

  /* If the statement is marked as part of an EH region, then it is
     expected that the statement could throw.  Verify that when we
     have optimizations that simplify statements such that we prove
     that they cannot throw, that we update other data structures
     to match.  */
  if (lookup_stmt_eh_region (stmt) >= 0)
    {
      if (!tree_could_throw_p (stmt))
        {
          error ("Statement marked for throw, but doesn%'t.");
          goto fail;
        }
      if (!last_in_block && tree_can_throw_internal (stmt))
        {
          error ("Statement marked for throw in middle of block.");
          goto fail;
        }
    }

  return false;

 fail:
  debug_generic_stmt (stmt);
  return true;
}
/* Return true when T can be shared.  */

static bool
tree_node_can_be_shared (tree t)
{
  if (IS_TYPE_OR_DECL_P (t)
      /* We check for constants explicitly since they are not considered
         gimple invariants if they overflowed.  */
      || CONSTANT_CLASS_P (t)
      || is_gimple_min_invariant (t)
      || TREE_CODE (t) == SSA_NAME)
    return true;

  while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
          /* We check for constants explicitly since they are not considered
             gimple invariants if they overflowed.  */
          && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
              || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
         || (TREE_CODE (t) == COMPONENT_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR))
    t = TREE_OPERAND (t, 0);

  return false;
}
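
/* Examples (illustrative only): types, decls, constants, invariants and
   SSA_NAMEs may appear many times in the IL, and so may reference chains
   like a.b.c or a[3] whose indices are invariant; that is why
   verify_node_sharing strips such wrappers before hashing.  An expression
   node such as a MODIFY_EXPR, on the other hand, must be unshared (compare
   the use of unshare_expr in tree_duplicate_bb below).  */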
/* Called via walk_trees.  Verify tree sharing.  */

static tree
verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
{
  htab_t htab = (htab_t) data;
  void **slot;

  if (tree_node_can_be_shared (*tp))
    {
      *walk_subtrees = false;
      return NULL;
    }

  slot = htab_find_slot (htab, *tp, INSERT);
  if (*slot)
    return (tree) *slot;
  *slot = *tp;

  return NULL;
}
/* Verify the GIMPLE statement chain.  */

void
verify_stmts (void)
{
  basic_block bb;
  block_stmt_iterator bsi;
  bool err = false;
  htab_t htab;
  tree addr;

  timevar_push (TV_TREE_STMT_VERIFY);
  htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);

  FOR_EACH_BB (bb)
    {
      tree phi;
      int i;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          int phi_num_args = PHI_NUM_ARGS (phi);

          for (i = 0; i < phi_num_args; i++)
            {
              tree t = PHI_ARG_DEF (phi, i);

              /* Addressable variables do have SSA_NAMEs but they
                 are not considered gimple values.  */
              if (TREE_CODE (t) != SSA_NAME
                  && TREE_CODE (t) != FUNCTION_DECL
                  && !is_gimple_val (t))
                {
                  error ("PHI def is not a GIMPLE value");
                  debug_generic_stmt (phi);
                  debug_generic_stmt (t);
                  err = true;
                }

              addr = walk_tree (&t, verify_expr, NULL, NULL);
              if (addr)
                {
                  debug_generic_stmt (addr);
                  err = true;
                }

              addr = walk_tree (&t, verify_node_sharing, htab, NULL);
              if (addr)
                {
                  error ("Incorrect sharing of tree nodes");
                  debug_generic_stmt (phi);
                  debug_generic_stmt (addr);
                  err = true;
                }
            }
        }

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
        {
          tree stmt = bsi_stmt (bsi);

          bsi_next (&bsi);
          err |= verify_stmt (stmt, bsi_end_p (bsi));
          addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
          if (addr)
            {
              error ("Incorrect sharing of tree nodes");
              debug_generic_stmt (stmt);
              debug_generic_stmt (addr);
              err = true;
            }
        }
    }

  if (err)
    internal_error ("verify_stmts failed.");

  htab_delete (htab);
  timevar_pop (TV_TREE_STMT_VERIFY);
}
3372 /* Verifies that the flow information is OK. */
3375 tree_verify_flow_info (void)
3379 block_stmt_iterator bsi
;
3384 if (ENTRY_BLOCK_PTR
->stmt_list
)
3386 error ("ENTRY_BLOCK has a statement list associated with it\n");
3390 if (EXIT_BLOCK_PTR
->stmt_list
)
3392 error ("EXIT_BLOCK has a statement list associated with it\n");
3396 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
3397 if (e
->flags
& EDGE_FALLTHRU
)
3399 error ("Fallthru to exit from bb %d\n", e
->src
->index
);
3405 bool found_ctrl_stmt
= false;
3407 /* Skip labels on the start of basic block. */
3408 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
3410 if (TREE_CODE (bsi_stmt (bsi
)) != LABEL_EXPR
)
3413 if (label_to_block (LABEL_EXPR_LABEL (bsi_stmt (bsi
))) != bb
)
3415 tree stmt
= bsi_stmt (bsi
);
3416 error ("Label %s to block does not match in bb %d\n",
3417 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt
))),
3422 if (decl_function_context (LABEL_EXPR_LABEL (bsi_stmt (bsi
)))
3423 != current_function_decl
)
3425 tree stmt
= bsi_stmt (bsi
);
3426 error ("Label %s has incorrect context in bb %d\n",
3427 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt
))),
3433 /* Verify that body of basic block BB is free of control flow. */
3434 for (; !bsi_end_p (bsi
); bsi_next (&bsi
))
3436 tree stmt
= bsi_stmt (bsi
);
3438 if (found_ctrl_stmt
)
3440 error ("Control flow in the middle of basic block %d\n",
3445 if (stmt_ends_bb_p (stmt
))
3446 found_ctrl_stmt
= true;
3448 if (TREE_CODE (stmt
) == LABEL_EXPR
)
3450 error ("Label %s in the middle of basic block %d\n",
3451 IDENTIFIER_POINTER (DECL_NAME (stmt
)),
3456 bsi
= bsi_last (bb
);
3457 if (bsi_end_p (bsi
))
3460 stmt
= bsi_stmt (bsi
);
3462 if (is_ctrl_stmt (stmt
))
3464 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3465 if (e
->flags
& EDGE_FALLTHRU
)
3467 error ("Fallthru edge after a control statement in bb %d \n",
3473 switch (TREE_CODE (stmt
))
3479 if (TREE_CODE (COND_EXPR_THEN (stmt
)) != GOTO_EXPR
3480 || TREE_CODE (COND_EXPR_ELSE (stmt
)) != GOTO_EXPR
)
3482 error ("Structured COND_EXPR at the end of bb %d\n", bb
->index
);
3486 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
3488 if (!true_edge
|| !false_edge
3489 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
3490 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
3491 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
3492 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
3493 || EDGE_COUNT (bb
->succs
) >= 3)
3495 error ("Wrong outgoing edge flags at end of bb %d\n",
3500 if (!has_label_p (true_edge
->dest
,
3501 GOTO_DESTINATION (COND_EXPR_THEN (stmt
))))
3503 error ("%<then%> label does not match edge at end of bb %d\n",
3508 if (!has_label_p (false_edge
->dest
,
3509 GOTO_DESTINATION (COND_EXPR_ELSE (stmt
))))
3511 error ("%<else%> label does not match edge at end of bb %d\n",
3519 if (simple_goto_p (stmt
))
3521 error ("Explicit goto at end of bb %d\n", bb
->index
);
3526 /* FIXME. We should double check that the labels in the
3527 destination blocks have their address taken. */
3528 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3529 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
3530 | EDGE_FALSE_VALUE
))
3531 || !(e
->flags
& EDGE_ABNORMAL
))
3533 error ("Wrong outgoing edge flags at end of bb %d\n",
3541 if (EDGE_COUNT (bb
->succs
) != 1
3542 || (EDGE_SUCC (bb
, 0)->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
3543 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
3545 error ("Wrong outgoing edge flags at end of bb %d\n", bb
->index
);
3548 if (EDGE_SUCC (bb
, 0)->dest
!= EXIT_BLOCK_PTR
)
3550 error ("Return edge does not point to exit in bb %d\n",
3563 vec
= SWITCH_LABELS (stmt
);
3564 n
= TREE_VEC_LENGTH (vec
);
3566 /* Mark all the destination basic blocks. */
3567 for (i
= 0; i
< n
; ++i
)
3569 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
3570 basic_block label_bb
= label_to_block (lab
);
3572 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
3573 label_bb
->aux
= (void *)1;
3576 /* Verify that the case labels are sorted. */
3577 prev
= TREE_VEC_ELT (vec
, 0);
3578 for (i
= 1; i
< n
- 1; ++i
)
3580 tree c
= TREE_VEC_ELT (vec
, i
);
3583 error ("Found default case not at end of case vector");
3587 if (! tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
3589 error ("Case labels not sorted:\n ");
3590 print_generic_expr (stderr
, prev
, 0);
3591 fprintf (stderr
," is greater than ");
3592 print_generic_expr (stderr
, c
, 0);
3593 fprintf (stderr
," but comes before it.\n");
3598 if (CASE_LOW (TREE_VEC_ELT (vec
, n
- 1)))
3600 error ("No default case found at end of case vector");
3604 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3608 error ("Extra outgoing edge %d->%d\n",
3609 bb
->index
, e
->dest
->index
);
3612 e
->dest
->aux
= (void *)2;
3613 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
3614 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
3616 error ("Wrong outgoing edge flags at end of bb %d\n",
3622 /* Check that we have all of them. */
3623 for (i
= 0; i
< n
; ++i
)
3625 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
3626 basic_block label_bb
= label_to_block (lab
);
3628 if (label_bb
->aux
!= (void *)2)
3630 error ("Missing edge %i->%i\n",
3631 bb
->index
, label_bb
->index
);
3636 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3637 e
->dest
->aux
= (void *)0;
3644 if (dom_computed
[CDI_DOMINATORS
] >= DOM_NO_FAST_QUERY
)
3645 verify_dominators (CDI_DOMINATORS
);
3651 /* Updates phi nodes after creating a forwarder block joined
3652 by edge FALLTHRU. */
3655 tree_make_forwarder_block (edge fallthru
)
3659 basic_block dummy
, bb
;
3660 tree phi
, new_phi
, var
, prev
, next
;
3662 dummy
= fallthru
->src
;
3663 bb
= fallthru
->dest
;
3665 if (EDGE_COUNT (bb
->preds
) == 1)
3668 /* If we redirected a branch we must create new phi nodes at the
3670 for (phi
= phi_nodes (dummy
); phi
; phi
= PHI_CHAIN (phi
))
3672 var
= PHI_RESULT (phi
);
3673 new_phi
= create_phi_node (var
, bb
);
3674 SSA_NAME_DEF_STMT (var
) = new_phi
;
3675 SET_PHI_RESULT (phi
, make_ssa_name (SSA_NAME_VAR (var
), phi
));
3676 add_phi_arg (&new_phi
, PHI_RESULT (phi
), fallthru
);
3679 /* Ensure that the PHI node chain is in the same order. */
3681 for (phi
= phi_nodes (bb
); phi
; phi
= next
)
3683 next
= PHI_CHAIN (phi
);
3684 PHI_CHAIN (phi
) = prev
;
3687 set_phi_nodes (bb
, prev
);
3689 /* Add the arguments we have stored on edges. */
3690 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
3695 flush_pending_stmts (e
);
/* Return true if basic block BB does nothing except pass control
   flow to another block and that we can safely insert a label at
   the start of the successor block.

   As a precondition, we require that BB be not equal to
   ENTRY_BLOCK_PTR.  */

static bool
tree_forwarder_block_p (basic_block bb)
{
  block_stmt_iterator bsi;
  edge e;
  edge_iterator ei;

  /* BB must have a single outgoing edge.  */
  if (EDGE_COUNT (bb->succs) != 1
      /* BB can not have any PHI nodes.  This could potentially be
         relaxed early in compilation if we re-rewrote the variables
         appearing in any PHI nodes in forwarder blocks.  */
      || phi_nodes (bb)
      /* BB may not be a predecessor of EXIT_BLOCK_PTR.  */
      || EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR
      /* BB may not have an abnormal outgoing edge.  */
      || (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
    return false;

  gcc_assert (bb != ENTRY_BLOCK_PTR);

  /* Successors of the entry block are not forwarders.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    if (e->dest == bb)
      return false;

  /* Now walk through the statements.  We can ignore labels, anything else
     means this is not a forwarder block.  */
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);

      switch (TREE_CODE (stmt))
        {
        case LABEL_EXPR:
          if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
            return false;
          break;

        default:
          return false;
        }
    }

  return true;
}
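
/* Example (illustrative only): a typical forwarder block contains at most
   a local label and simply falls through to its lone successor,

       <bb 4>:
       L3:;                    (single successor <bb 5>, no PHI nodes)

   The jump threading code below retargets edges entering such blocks
   directly at the final non-forwarder destination.  */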
3756 /* Thread jumps from BB. */
3759 thread_jumps_from_bb (basic_block bb
)
3763 bool retval
= false;
3765 /* Examine each of our block's successors to see if it is
3767 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3772 basic_block dest
, tmp
, curr
, old_dest
;
3776 /* If the edge is abnormal or its destination is not
3777 forwardable, then there's nothing to do. */
3778 if ((e
->flags
& EDGE_ABNORMAL
)
3779 || !bb_ann (e
->dest
)->forwardable
)
3785 /* Now walk through as many forwarder blocks as possible to find
3786 the ultimate destination we want to thread our jump to. */
3787 last
= EDGE_SUCC (e
->dest
, 0);
3788 bb_ann (e
->dest
)->forwardable
= 0;
3789 for (dest
= EDGE_SUCC (e
->dest
, 0)->dest
;
3790 bb_ann (dest
)->forwardable
;
3791 last
= EDGE_SUCC (dest
, 0),
3792 dest
= EDGE_SUCC (dest
, 0)->dest
)
3793 bb_ann (dest
)->forwardable
= 0;
3795 /* Reset the forwardable marks to 1. */
3798 tmp
= EDGE_SUCC (tmp
, 0)->dest
)
3799 bb_ann (tmp
)->forwardable
= 1;
3801 if (dest
== e
->dest
)
3807 old
= find_edge (bb
, dest
);
3810 /* If there already is an edge, check whether the values in
3811 phi nodes differ. */
3812 if (!phi_alternatives_equal (dest
, last
, old
))
3814 /* The previous block is forwarder. Redirect our jump
3815 to that target instead since we know it has no PHI
3816 nodes that will need updating. */
3819 /* That might mean that no forwarding at all is
3821 if (dest
== e
->dest
)
3827 old
= find_edge (bb
, dest
);
3831 /* Perform the redirection. */
3834 freq
= EDGE_FREQUENCY (e
);
3836 e
= redirect_edge_and_branch (e
, dest
);
3838 /* Update the profile. */
3839 if (profile_status
!= PROFILE_ABSENT
)
3840 for (curr
= old_dest
;
3842 curr
= EDGE_SUCC (curr
, 0)->dest
)
3844 curr
->frequency
-= freq
;
3845 if (curr
->frequency
< 0)
3846 curr
->frequency
= 0;
3847 curr
->count
-= count
;
3848 if (curr
->count
< 0)
3850 EDGE_SUCC (curr
, 0)->count
-= count
;
3851 if (EDGE_SUCC (curr
, 0)->count
< 0)
3852 EDGE_SUCC (curr
, 0)->count
= 0;
3857 /* Update PHI nodes. We know that the new argument should
3858 have the same value as the argument associated with LAST.
3859 Otherwise we would have changed our target block
3861 for (phi
= phi_nodes (dest
); phi
; phi
= PHI_CHAIN (phi
))
3863 arg
= phi_arg_from_edge (phi
, last
);
3864 gcc_assert (arg
>= 0);
3865 add_phi_arg (&phi
, PHI_ARG_DEF (phi
, arg
), e
);
3869 /* Remove the unreachable blocks (observe that if all blocks
3870 were reachable before, only those in the path we threaded
3871 over and did not have any predecessor outside of the path
3872 become unreachable). */
3873 for (; old_dest
!= dest
; old_dest
= tmp
)
3875 tmp
= EDGE_SUCC (old_dest
, 0)->dest
;
3877 if (EDGE_COUNT (old_dest
->preds
) > 0)
3880 delete_basic_block (old_dest
);
3883 /* Update the dominators. */
3884 if (dom_info_available_p (CDI_DOMINATORS
))
3886 /* If the dominator of the destination was in the
3887 path, set its dominator to the start of the
3889 if (get_immediate_dominator (CDI_DOMINATORS
, old_dest
) == NULL
)
3890 set_immediate_dominator (CDI_DOMINATORS
, old_dest
, bb
);
3892 /* Now proceed like if we forwarded just over one edge at a
3893 time. Algorithm for forwarding edge S --> A over
3894 edge A --> B then is
3897 && !dominated_by (S, B))
3898 idom (B) = idom (A);
3899 recount_idom (A); */
3901 for (; old_dest
!= dest
; old_dest
= tmp
)
3905 tmp
= EDGE_SUCC (old_dest
, 0)->dest
;
3907 if (get_immediate_dominator (CDI_DOMINATORS
, tmp
) == old_dest
3908 && !dominated_by_p (CDI_DOMINATORS
, bb
, tmp
))
3910 dom
= get_immediate_dominator (CDI_DOMINATORS
, old_dest
);
3911 set_immediate_dominator (CDI_DOMINATORS
, tmp
, dom
);
3914 dom
= recount_dominator (CDI_DOMINATORS
, old_dest
);
3915 set_immediate_dominator (CDI_DOMINATORS
, old_dest
, dom
);
/* Thread jumps over empty statements.

   This code should _not_ thread over obviously equivalent conditions
   as that requires nontrivial updates to the SSA graph.

   As a precondition, we require that all basic blocks be reachable.
   That is, there should be no opportunities left for
   delete_unreachable_blocks.  */
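
/* Example (illustrative only): with A and B forwarder blocks in

       S -> A -> B -> D

   thread_jumps_from_bb redirects the edge S->A to S->D, subtracts the
   threaded count and frequency from A and B, deletes them if they lose
   all predecessors, and then repairs the dominator tree as described in
   the comments of that function.  */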
3937 bool retval
= false;
3938 basic_block
*worklist
= xmalloc (sizeof (basic_block
) * last_basic_block
);
3939 unsigned int size
= 0;
3943 bb_ann (bb
)->forwardable
= tree_forwarder_block_p (bb
);
3944 bb
->flags
&= ~BB_VISITED
;
3947 /* We pretend to have ENTRY_BLOCK_PTR in WORKLIST. This way,
3948 ENTRY_BLOCK_PTR will never be entered into WORKLIST. */
3949 ENTRY_BLOCK_PTR
->flags
|= BB_VISITED
;
3951 /* Initialize WORKLIST by putting non-forwarder blocks that
3952 immediately precede forwarder blocks because those are the ones
3953 that we know we can thread jumps from. We use BB_VISITED to
3954 indicate whether a given basic block is in WORKLIST or not,
3955 thereby avoiding duplicates in WORKLIST. */
3961 /* We are not interested in finding non-forwarder blocks
3962 directly. We want to find non-forwarder blocks as
3963 predecessors of a forwarder block. */
3964 if (!bb_ann (bb
)->forwardable
)
3967 /* Now we know BB is a forwarder block. Visit each of its
3968 incoming edges and add to WORKLIST all non-forwarder blocks
3969 among BB's predecessors. */
3970 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
3972 /* We don't want to put a duplicate into WORKLIST. */
3973 if ((e
->src
->flags
& BB_VISITED
) == 0
3974 /* We are not interested in threading jumps from a forwarder
3976 && !bb_ann (e
->src
)->forwardable
)
3978 e
->src
->flags
|= BB_VISITED
;
3979 worklist
[size
] = e
->src
;
3985 /* Now let's drain WORKLIST. */
3989 bb
= worklist
[size
];
3991 /* BB->INDEX is not longer in WORKLIST, so clear BB_VISITED. */
3992 bb
->flags
&= ~BB_VISITED
;
3994 if (thread_jumps_from_bb (bb
))
3998 if (tree_forwarder_block_p (bb
))
4003 bb_ann (bb
)->forwardable
= true;
4005 /* Attempts to thread through BB may have been blocked
4006 because BB was not a forwarder block before. Now
4007 that BB is a forwarder block, we should revisit BB's
4009 FOR_EACH_EDGE (f
, ej
, bb
->preds
)
4011 /* We don't want to put a duplicate into WORKLIST. */
4012 if ((f
->src
->flags
& BB_VISITED
) == 0
4013 /* We are not interested in threading jumps from a
4015 && !bb_ann (f
->src
)->forwardable
)
4017 f
->src
->flags
|= BB_VISITED
;
4018 worklist
[size
] = f
->src
;
4026 ENTRY_BLOCK_PTR
->flags
&= ~BB_VISITED
;
4034 /* Return a non-special label in the head of basic block BLOCK.
4035 Create one if it doesn't exist. */
4038 tree_block_label (basic_block bb
)
4040 block_stmt_iterator i
, s
= bsi_start (bb
);
4044 for (i
= s
; !bsi_end_p (i
); first
= false, bsi_next (&i
))
4046 stmt
= bsi_stmt (i
);
4047 if (TREE_CODE (stmt
) != LABEL_EXPR
)
4049 label
= LABEL_EXPR_LABEL (stmt
);
4050 if (!DECL_NONLOCAL (label
))
4053 bsi_move_before (&i
, &s
);
4058 label
= create_artificial_label ();
4059 stmt
= build1 (LABEL_EXPR
, void_type_node
, label
);
4060 bsi_insert_before (&s
, stmt
, BSI_NEW_STMT
);
4065 /* Attempt to perform edge redirection by replacing a possibly complex
4066 jump instruction by a goto or by removing the jump completely.
4067 This can apply only if all edges now point to the same block. The
4068 parameters and return values are equivalent to
4069 redirect_edge_and_branch. */
4072 tree_try_redirect_by_replacing_jump (edge e
, basic_block target
)
4074 basic_block src
= e
->src
;
4076 block_stmt_iterator b
;
4080 /* Verify that all targets will be TARGET. */
4081 FOR_EACH_EDGE (tmp
, ei
, src
->succs
)
4082 if (tmp
->dest
!= target
&& tmp
!= e
)
4091 stmt
= bsi_stmt (b
);
4093 if (TREE_CODE (stmt
) == COND_EXPR
4094 || TREE_CODE (stmt
) == SWITCH_EXPR
)
4097 e
= ssa_redirect_edge (e
, target
);
4098 e
->flags
= EDGE_FALLTHRU
;
4106 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4107 edge representing the redirected branch. */
4110 tree_redirect_edge_and_branch (edge e
, basic_block dest
)
4112 basic_block bb
= e
->src
;
4113 block_stmt_iterator bsi
;
4117 if (e
->flags
& (EDGE_ABNORMAL_CALL
| EDGE_EH
))
4120 if (e
->src
!= ENTRY_BLOCK_PTR
4121 && (ret
= tree_try_redirect_by_replacing_jump (e
, dest
)))
4124 if (e
->dest
== dest
)
4127 label
= tree_block_label (dest
);
4129 bsi
= bsi_last (bb
);
4130 stmt
= bsi_end_p (bsi
) ? NULL
: bsi_stmt (bsi
);
4132 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
4135 stmt
= (e
->flags
& EDGE_TRUE_VALUE
4136 ? COND_EXPR_THEN (stmt
)
4137 : COND_EXPR_ELSE (stmt
));
4138 GOTO_DESTINATION (stmt
) = label
;
4142 /* No non-abnormal edges should lead from a non-simple goto, and
4143 simple ones should be represented implicitly. */
4148 tree vec
= SWITCH_LABELS (stmt
);
4149 size_t i
, n
= TREE_VEC_LENGTH (vec
);
4151 for (i
= 0; i
< n
; ++i
)
4153 tree elt
= TREE_VEC_ELT (vec
, i
);
4154 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
4155 CASE_LABEL (elt
) = label
;
4162 e
->flags
|= EDGE_FALLTHRU
;
4166 /* Otherwise it must be a fallthru edge, and we don't need to
4167 do anything besides redirecting it. */
4168 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
4172 /* Update/insert PHI nodes as necessary. */
4174 /* Now update the edges in the CFG. */
4175 e
= ssa_redirect_edge (e
, dest
);
/* Simple wrapper, as we can always redirect fallthru edges.  */

static basic_block
tree_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  e = tree_redirect_edge_and_branch (e, dest);
  gcc_assert (e);

  return NULL;
}
4193 /* Splits basic block BB after statement STMT (but at least after the
4194 labels). If STMT is NULL, BB is split just after the labels. */
4197 tree_split_block (basic_block bb
, void *stmt
)
4199 block_stmt_iterator bsi
, bsi_tgt
;
4205 new_bb
= create_empty_bb (bb
);
4207 /* Redirect the outgoing edges. */
4208 new_bb
->succs
= bb
->succs
;
4210 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
4213 if (stmt
&& TREE_CODE ((tree
) stmt
) == LABEL_EXPR
)
4216 /* Move everything from BSI to the new basic block. */
4217 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
4219 act
= bsi_stmt (bsi
);
4220 if (TREE_CODE (act
) == LABEL_EXPR
)
4233 bsi_tgt
= bsi_start (new_bb
);
4234 while (!bsi_end_p (bsi
))
4236 act
= bsi_stmt (bsi
);
4238 bsi_insert_after (&bsi_tgt
, act
, BSI_NEW_STMT
);
4245 /* Moves basic block BB after block AFTER. */
4248 tree_move_block_after (basic_block bb
, basic_block after
)
4250 if (bb
->prev_bb
== after
)
4254 link_block (bb
, after
);
/* Return true if basic_block can be duplicated.  */

static bool
tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
{
  return true;
}
4268 /* Create a duplicate of the basic block BB. NOTE: This does not
4269 preserve SSA form. */
4272 tree_duplicate_bb (basic_block bb
)
4275 block_stmt_iterator bsi
, bsi_tgt
;
4277 ssa_op_iter op_iter
;
4279 new_bb
= create_empty_bb (EXIT_BLOCK_PTR
->prev_bb
);
4281 /* First copy the phi nodes. We do not copy phi node arguments here,
4282 since the edges are not ready yet. Keep the chain of phi nodes in
4283 the same order, so that we can add them later. */
4284 for (phi
= phi_nodes (bb
); phi
; phi
= TREE_CHAIN (phi
))
4286 mark_for_rewrite (PHI_RESULT (phi
));
4287 create_phi_node (PHI_RESULT (phi
), new_bb
);
4289 set_phi_nodes (new_bb
, nreverse (phi_nodes (new_bb
)));
4291 bsi_tgt
= bsi_start (new_bb
);
4292 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
4294 tree stmt
= bsi_stmt (bsi
);
4297 if (TREE_CODE (stmt
) == LABEL_EXPR
)
4300 /* Record the definitions. */
4301 get_stmt_operands (stmt
);
4303 FOR_EACH_SSA_TREE_OPERAND (val
, stmt
, op_iter
, SSA_OP_ALL_DEFS
)
4304 mark_for_rewrite (val
);
4306 copy
= unshare_expr (stmt
);
4308 /* Copy also the virtual operands. */
4309 get_stmt_ann (copy
);
4310 copy_virtual_operands (copy
, stmt
);
4312 bsi_insert_after (&bsi_tgt
, copy
, BSI_NEW_STMT
);
4318 /* Basic block BB_COPY was created by code duplication. Add phi node
4319 arguments for edges going out of BB_COPY. The blocks that were
4320 duplicated have rbi->duplicated set to one. */
4323 add_phi_args_after_copy_bb (basic_block bb_copy
)
4325 basic_block bb
, dest
;
4328 tree phi
, phi_copy
, phi_next
, def
;
4330 bb
= bb_copy
->rbi
->original
;
4332 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
4334 if (!phi_nodes (e_copy
->dest
))
4337 if (e_copy
->dest
->rbi
->duplicated
)
4338 dest
= e_copy
->dest
->rbi
->original
;
4340 dest
= e_copy
->dest
;
4342 e
= find_edge (bb
, dest
);
4345 /* During loop unrolling the target of the latch edge is copied.
4346 In this case we are not looking for edge to dest, but to
4347 duplicated block whose original was dest. */
4348 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4349 if (e
->dest
->rbi
->duplicated
4350 && e
->dest
->rbi
->original
== dest
)
4353 gcc_assert (e
!= NULL
);
4356 for (phi
= phi_nodes (e
->dest
), phi_copy
= phi_nodes (e_copy
->dest
);
4358 phi
= phi_next
, phi_copy
= TREE_CHAIN (phi_copy
))
4360 phi_next
= TREE_CHAIN (phi
);
4362 gcc_assert (PHI_RESULT (phi
) == PHI_RESULT (phi_copy
));
4363 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
4364 add_phi_arg (&phi_copy
, def
, e_copy
);
4369 /* Blocks in REGION_COPY array of length N_REGION were created by
4370 duplication of basic blocks. Add phi node arguments for edges
4371 going from these blocks. */
4374 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
)
4378 for (i
= 0; i
< n_region
; i
++)
4379 region_copy
[i
]->rbi
->duplicated
= 1;
4381 for (i
= 0; i
< n_region
; i
++)
4382 add_phi_args_after_copy_bb (region_copy
[i
]);
4384 for (i
= 0; i
< n_region
; i
++)
4385 region_copy
[i
]->rbi
->duplicated
= 0;
4388 /* Maps the old ssa name FROM_NAME to TO_NAME. */
4390 struct ssa_name_map_entry
4396 /* Hash function for ssa_name_map_entry. */
4399 ssa_name_map_entry_hash (const void *entry
)
4401 const struct ssa_name_map_entry
*en
= entry
;
4402 return SSA_NAME_VERSION (en
->from_name
);
4405 /* Equality function for ssa_name_map_entry. */
4408 ssa_name_map_entry_eq (const void *in_table
, const void *ssa_name
)
4410 const struct ssa_name_map_entry
*en
= in_table
;
4412 return en
->from_name
== ssa_name
;
4415 /* Allocate duplicates of ssa names in list DEFINITIONS and store the mapping
4419 allocate_ssa_names (bitmap definitions
, htab_t
*map
)
4422 struct ssa_name_map_entry
*entry
;
4428 *map
= htab_create (10, ssa_name_map_entry_hash
,
4429 ssa_name_map_entry_eq
, free
);
4430 EXECUTE_IF_SET_IN_BITMAP (definitions
, 0, ver
, bi
)
4432 name
= ssa_name (ver
);
4433 slot
= htab_find_slot_with_hash (*map
, name
, SSA_NAME_VERSION (name
),
4439 entry
= xmalloc (sizeof (struct ssa_name_map_entry
));
4440 entry
->from_name
= name
;
4443 entry
->to_name
= duplicate_ssa_name (name
, SSA_NAME_DEF_STMT (name
));
4447 /* Rewrite the definition DEF in statement STMT to new ssa name as specified
4448 by the mapping MAP. */
4451 rewrite_to_new_ssa_names_def (def_operand_p def
, tree stmt
, htab_t map
)
4453 tree name
= DEF_FROM_PTR (def
);
4454 struct ssa_name_map_entry
*entry
;
4456 gcc_assert (TREE_CODE (name
) == SSA_NAME
);
4458 entry
= htab_find_with_hash (map
, name
, SSA_NAME_VERSION (name
));
4462 SET_DEF (def
, entry
->to_name
);
4463 SSA_NAME_DEF_STMT (entry
->to_name
) = stmt
;
4466 /* Rewrite the USE to new ssa name as specified by the mapping MAP. */
4469 rewrite_to_new_ssa_names_use (use_operand_p use
, htab_t map
)
4471 tree name
= USE_FROM_PTR (use
);
4472 struct ssa_name_map_entry
*entry
;
4474 if (TREE_CODE (name
) != SSA_NAME
)
4477 entry
= htab_find_with_hash (map
, name
, SSA_NAME_VERSION (name
));
4481 SET_USE (use
, entry
->to_name
);
4484 /* Rewrite the ssa names in basic block BB to new ones as specified by the
4488 rewrite_to_new_ssa_names_bb (basic_block bb
, htab_t map
)
4494 block_stmt_iterator bsi
;
4498 v_may_def_optype v_may_defs
;
4499 v_must_def_optype v_must_defs
;
4502 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4503 if (e
->flags
& EDGE_ABNORMAL
)
4506 for (phi
= phi_nodes (bb
); phi
; phi
= TREE_CHAIN (phi
))
4508 rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi
), phi
, map
);
4510 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)) = 1;
4513 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
4515 stmt
= bsi_stmt (bsi
);
4516 get_stmt_operands (stmt
);
4517 ann
= stmt_ann (stmt
);
4519 uses
= USE_OPS (ann
);
4520 for (i
= 0; i
< NUM_USES (uses
); i
++)
4521 rewrite_to_new_ssa_names_use (USE_OP_PTR (uses
, i
), map
);
4523 defs
= DEF_OPS (ann
);
4524 for (i
= 0; i
< NUM_DEFS (defs
); i
++)
4525 rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs
, i
), stmt
, map
);
4527 vuses
= VUSE_OPS (ann
);
4528 for (i
= 0; i
< NUM_VUSES (vuses
); i
++)
4529 rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses
, i
), map
);
4531 v_may_defs
= V_MAY_DEF_OPS (ann
);
4532 for (i
= 0; i
< NUM_V_MAY_DEFS (v_may_defs
); i
++)
4534 rewrite_to_new_ssa_names_use
4535 (V_MAY_DEF_OP_PTR (v_may_defs
, i
), map
);
4536 rewrite_to_new_ssa_names_def
4537 (V_MAY_DEF_RESULT_PTR (v_may_defs
, i
), stmt
, map
);
4540 v_must_defs
= V_MUST_DEF_OPS (ann
);
4541 for (i
= 0; i
< NUM_V_MUST_DEFS (v_must_defs
); i
++)
4543 rewrite_to_new_ssa_names_def
4544 (V_MUST_DEF_RESULT_PTR (v_must_defs
, i
), stmt
, map
);
4545 rewrite_to_new_ssa_names_use
4546 (V_MUST_DEF_KILL_PTR (v_must_defs
, i
), map
);
4550 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4551 for (phi
= phi_nodes (e
->dest
); phi
; phi
= TREE_CHAIN (phi
))
4553 rewrite_to_new_ssa_names_use
4554 (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, e
), map
);
4556 if (e
->flags
& EDGE_ABNORMAL
)
4558 tree op
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
4559 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op
) = 1;
/* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
   by the mapping MAP.  */

static void
rewrite_to_new_ssa_names (basic_block *region, unsigned n_region, htab_t map)
{
  unsigned r;

  for (r = 0; r < n_region; r++)
    rewrite_to_new_ssa_names_bb (region[r], map);
}
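
/* Illustrative summary (not part of the original comments): the rewrite is
   driven by the ssa_name_map_entry table built in allocate_ssa_names.  Each
   entry is keyed on SSA_NAME_VERSION of the old name and records the
   replacement produced by duplicate_ssa_name; rewrite_to_new_ssa_names_def
   and rewrite_to_new_ssa_names_use look the old name up and patch the
   operand in place.  Running the pair once over the region and once over
   its copy, each time with a fresh map, gives both regions their own
   definitions.  */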
4576 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4577 important exit edge EXIT. By important we mean that no SSA name defined
4578 inside region is live over the other exit edges of the region. All entry
4579 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4580 to the duplicate of the region. SSA form, dominance and loop information
4581 is updated. The new basic blocks are stored to REGION_COPY in the same
4582 order as they had in REGION, provided that REGION_COPY is not NULL.
4583 The function returns false if it is unable to copy the region,
4587 tree_duplicate_sese_region (edge entry
, edge exit
,
4588 basic_block
*region
, unsigned n_region
,
4589 basic_block
*region_copy
)
4591 unsigned i
, n_doms
, ver
;
4592 bool free_region_copy
= false, copying_header
= false;
4593 struct loop
*loop
= entry
->dest
->loop_father
;
4598 htab_t ssa_name_map
= NULL
;
4602 if (!can_copy_bbs_p (region
, n_region
))
4605 /* Some sanity checking. Note that we do not check for all possible
4606 missuses of the functions. I.e. if you ask to copy something weird,
4607 it will work, but the state of structures probably will not be
4610 for (i
= 0; i
< n_region
; i
++)
4612 /* We do not handle subloops, i.e. all the blocks must belong to the
4614 if (region
[i
]->loop_father
!= loop
)
4617 if (region
[i
] != entry
->dest
4618 && region
[i
] == loop
->header
)
4624 /* In case the function is used for loop header copying (which is the primary
4625 use), ensure that EXIT and its copy will be new latch and entry edges. */
4626 if (loop
->header
== entry
->dest
)
4628 copying_header
= true;
4629 loop
->copy
= loop
->outer
;
4631 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
4634 for (i
= 0; i
< n_region
; i
++)
4635 if (region
[i
] != exit
->src
4636 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
4642 region_copy
= xmalloc (sizeof (basic_block
) * n_region
);
4643 free_region_copy
= true;
4646 gcc_assert (!any_marked_for_rewrite_p ());
4648 /* Record blocks outside the region that are duplicated by something
4650 doms
= xmalloc (sizeof (basic_block
) * n_basic_blocks
);
4651 n_doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
, doms
);
4653 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
);
4654 definitions
= marked_ssa_names ();
4658 loop
->header
= exit
->dest
;
4659 loop
->latch
= exit
->src
;
4662 /* Redirect the entry and add the phi node arguments. */
4663 redirected
= redirect_edge_and_branch (entry
, entry
->dest
->rbi
->copy
);
4664 gcc_assert (redirected
!= NULL
);
4665 flush_pending_stmts (entry
);
4667 /* Concerning updating of dominators: We must recount dominators
4668 for entry block and its copy. Anything that is outside of the region, but
4669 was dominated by something inside needs recounting as well. */
4670 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
4671 doms
[n_doms
++] = entry
->dest
->rbi
->original
;
4672 iterate_fix_dominators (CDI_DOMINATORS
, doms
, n_doms
);
4675 /* Add the other phi node arguments. */
4676 add_phi_args_after_copy (region_copy
, n_region
);
4678 /* Add phi nodes for definitions at exit. TODO -- once we have immediate
4679 uses, it should be possible to emit phi nodes just for definitions that
4680 are used outside region. */
4681 EXECUTE_IF_SET_IN_BITMAP (definitions
, 0, ver
, bi
)
4683 tree name
= ssa_name (ver
);
4685 phi
= create_phi_node (name
, exit
->dest
);
4686 add_phi_arg (&phi
, name
, exit
);
4687 add_phi_arg (&phi
, name
, exit_copy
);
4689 SSA_NAME_DEF_STMT (name
) = phi
;
4692 /* And create new definitions inside region and its copy. TODO -- once we
4693 have immediate uses, it might be better to leave definitions in region
4694 unchanged, create new ssa names for phi nodes on exit, and rewrite
4695 the uses, to avoid changing the copied region. */
4696 allocate_ssa_names (definitions
, &ssa_name_map
);
4697 rewrite_to_new_ssa_names (region
, n_region
, ssa_name_map
);
4698 allocate_ssa_names (definitions
, &ssa_name_map
);
4699 rewrite_to_new_ssa_names (region_copy
, n_region
, ssa_name_map
);
4700 htab_delete (ssa_name_map
);
4702 if (free_region_copy
)
4705 unmark_all_for_rewrite ();
4706 BITMAP_XFREE (definitions
);
4711 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
4714 dump_function_to_file (tree fn
, FILE *file
, int flags
)
4716 tree arg
, vars
, var
;
4717 bool ignore_topmost_bind
= false, any_var
= false;
4721 fprintf (file
, "%s (", lang_hooks
.decl_printable_name (fn
, 2));
4723 arg
= DECL_ARGUMENTS (fn
);
4726 print_generic_expr (file
, arg
, dump_flags
);
4727 if (TREE_CHAIN (arg
))
4728 fprintf (file
, ", ");
4729 arg
= TREE_CHAIN (arg
);
4731 fprintf (file
, ")\n");
4733 if (flags
& TDF_RAW
)
4735 dump_node (fn
, TDF_SLIM
| flags
, file
);
4739 /* When GIMPLE is lowered, the variables are no longer available in
4740 BIND_EXPRs, so display them separately. */
4741 if (cfun
&& cfun
->unexpanded_var_list
)
4743 ignore_topmost_bind
= true;
4745 fprintf (file
, "{\n");
4746 for (vars
= cfun
->unexpanded_var_list
; vars
; vars
= TREE_CHAIN (vars
))
4748 var
= TREE_VALUE (vars
);
4750 print_generic_decl (file
, var
, flags
);
4751 fprintf (file
, "\n");
4757 if (basic_block_info
)
4759 /* Make a CFG based dump. */
4760 check_bb_profile (ENTRY_BLOCK_PTR
, file
);
4761 if (!ignore_topmost_bind
)
4762 fprintf (file
, "{\n");
4764 if (any_var
&& n_basic_blocks
)
4765 fprintf (file
, "\n");
4768 dump_generic_bb (file
, bb
, 2, flags
);
4770 fprintf (file
, "}\n");
4771 check_bb_profile (EXIT_BLOCK_PTR
, file
);
4777 /* Make a tree based dump. */
4778 chain
= DECL_SAVED_TREE (fn
);
4780 if (TREE_CODE (chain
) == BIND_EXPR
)
4782 if (ignore_topmost_bind
)
4784 chain
= BIND_EXPR_BODY (chain
);
4792 if (!ignore_topmost_bind
)
4793 fprintf (file
, "{\n");
4798 fprintf (file
, "\n");
4800 print_generic_stmt_indented (file
, chain
, flags
, indent
);
4801 if (ignore_topmost_bind
)
4802 fprintf (file
, "}\n");
4805 fprintf (file
, "\n\n");
4809 /* Pretty print of the loops intermediate representation. */
4810 static void print_loop (FILE *, struct loop
*, int);
4811 static void print_pred_bbs (FILE *, basic_block bb
);
4812 static void print_succ_bbs (FILE *, basic_block bb
);
4815 /* Print the predecessors indexes of edge E on FILE. */
4818 print_pred_bbs (FILE *file
, basic_block bb
)
4823 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4824 fprintf (file
, "bb_%d", e
->src
->index
);
4828 /* Print the successors indexes of edge E on FILE. */
4831 print_succ_bbs (FILE *file
, basic_block bb
)
4836 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4837 fprintf (file
, "bb_%d", e
->src
->index
);
4841 /* Pretty print LOOP on FILE, indented INDENT spaces. */
4844 print_loop (FILE *file
, struct loop
*loop
, int indent
)
4852 s_indent
= (char *) alloca ((size_t) indent
+ 1);
4853 memset ((void *) s_indent
, ' ', (size_t) indent
);
4854 s_indent
[indent
] = '\0';
4856 /* Print the loop's header. */
4857 fprintf (file
, "%sloop_%d\n", s_indent
, loop
->num
);
4859 /* Print the loop's body. */
4860 fprintf (file
, "%s{\n", s_indent
);
4862 if (bb
->loop_father
== loop
)
4864 /* Print the basic_block's header. */
4865 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
4866 print_pred_bbs (file
, bb
);
4867 fprintf (file
, "}, succs = {");
4868 print_succ_bbs (file
, bb
);
4869 fprintf (file
, "})\n");
4871 /* Print the basic_block's body. */
4872 fprintf (file
, "%s {\n", s_indent
);
4873 tree_dump_bb (bb
, file
, indent
+ 4);
4874 fprintf (file
, "%s }\n", s_indent
);
4877 print_loop (file
, loop
->inner
, indent
+ 2);
4878 fprintf (file
, "%s}\n", s_indent
);
4879 print_loop (file
, loop
->next
, indent
);
/* Follow a CFG edge from the entry point of the program, and on entry
   of a loop, pretty print the loop structure on FILE.  */

void
print_loop_ir (FILE *file)
{
  basic_block bb;

  bb = BASIC_BLOCK (0);
  if (bb && bb->loop_father)
    print_loop (file, bb->loop_father, 0);
}


/* Debugging loops structure at tree level.  */

void
debug_loop_ir (void)
{
  print_loop_ir (stderr);
}
/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call.  Return false,
   otherwise.  */

static bool
tree_block_ends_with_call_p (basic_block bb)
{
  block_stmt_iterator bsi = bsi_last (bb);
  return get_call_expr_in (bsi_stmt (bsi)) != NULL;
}


/* Return true if BB ends with a conditional branch.  Return false,
   otherwise.  */

static bool
tree_block_ends_with_condjump_p (basic_block bb)
{
  tree stmt = tsi_stmt (bsi_last (bb).tsi);
  return (TREE_CODE (stmt) == COND_EXPR);
}
/* Return true if we need to add fake edge to exit at statement T.
   Helper function for tree_flow_call_edges_add.  */

static bool
need_fake_edge_p (tree t)
{
  tree call;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST, PURE and ALWAYS_RETURN calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  call = get_call_expr_in (t);
  if (call
      && !(call_expr_flags (call) &
           (ECF_NORETURN | ECF_LONGJMP | ECF_ALWAYS_RETURN)))
    return true;

  if (TREE_CODE (t) == ASM_EXPR
      && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
    return true;

  return false;
}
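
/* Example (illustrative only): a block ending in

       foo ();                 (foo not known to be const, pure or noreturn)

   or in a volatile asm receives a fake edge to EXIT_BLOCK_PTR from
   tree_flow_call_edges_add below, so profiling does not assume that the
   statements after the call are always executed.  */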

/* Add fake edges to the function exit for any non constant and non
   noreturn calls, volatile inline assembly in the bitmap of blocks
   specified by BLOCKS or to the whole CFG if BLOCKS is zero.  Return
   the number of blocks that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
tree_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == 0)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      edge_iterator ei;
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      block_stmt_iterator bsi = bsi_last (bb);
      tree t = NULL_TREE;
      if (!bsi_end_p (bsi))
	t = bsi_stmt (bsi);

      if (need_fake_edge_p (t))
	{
	  edge e;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->dest == EXIT_BLOCK_PTR)
	      {
		bsi_insert_on_edge (e, build_empty_stmt ());
		bsi_commit_edge_inserts ((int *) NULL);
		break;
	      }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      block_stmt_iterator bsi;
      tree stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      bsi = bsi_last (bb);
      if (!bsi_end_p (bsi))
	{
	  last_stmt = bsi_stmt (bsi);
	  do
	    {
	      stmt = bsi_stmt (bsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;
		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      edge_iterator ei;
		      FOR_EACH_EDGE (e, ei, bb->succs)
			gcc_assert (e->dest != EXIT_BLOCK_PTR);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
		}
	      bsi_prev (&bsi);
	    }
	  while (!bsi_end_p (bsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
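
/* Illustrative sketch (added; the GIMPLE below is hypothetical): for a
   profiled block

       a = foo ();
       b = a + 1;

   the loop above splits the block after the call and adds a fake edge
   to EXIT, so the CFG records that "b = a + 1" may never execute if foo
   does not return:

       bb_1:  a = foo ();   ----(EDGE_FAKE)---> EXIT
         |
         | (fallthru created by split_block)
         v
       bb_2:  b = a + 1;                                              */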

/* Purge dead EH edges out of basic block BB.  */

bool
tree_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  tree stmt = last_stmt (bb);

  if (stmt && tree_can_throw_internal (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
	{
	  ssa_remove_edge (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  /* Removal of dead EH edges might change dominators of not
     just immediate successors.  E.g. when bb1 is changed so that
     it no longer can throw and bb1->bb3 and bb1->bb4 are dead
     eh edges purged by this function, idom(bb5) must be recomputed.
     For now just free the dominance info.  */
  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  return changed;
}

/* Purge dead EH edges from every basic block whose index is set in BLOCKS.  */

bool
tree_purge_all_dead_eh_edges (bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
    }

  return changed;
}

struct cfg_hooks tree_cfg_hooks = {
  "tree",
  tree_verify_flow_info,
  tree_dump_bb,				/* dump_bb  */
  create_bb,				/* create_basic_block  */
  tree_redirect_edge_and_branch,	/* redirect_edge_and_branch  */
  tree_redirect_edge_and_branch_force,	/* redirect_edge_and_branch_force  */
  remove_bb,				/* delete_basic_block  */
  tree_split_block,			/* split_block  */
  tree_move_block_after,		/* move_block_after  */
  tree_can_merge_blocks_p,		/* can_merge_blocks_p  */
  tree_merge_blocks,			/* merge_blocks  */
  tree_predict_edge,			/* predict_edge  */
  tree_predicted_by_p,			/* predicted_by_p  */
  tree_can_duplicate_bb_p,		/* can_duplicate_block_p  */
  tree_duplicate_bb,			/* duplicate_block  */
  tree_split_edge,			/* split_edge  */
  tree_make_forwarder_block,		/* make_forward_block  */
  NULL,					/* tidy_fallthru_edge  */
  tree_block_ends_with_call_p,		/* block_ends_with_call_p */
  tree_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  tree_flow_call_edges_add		/* flow_call_edges_add */
};
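
/* Note (added for clarity): IL-independent code reaches these routines
   through the cfghooks.c wrappers, so once tree_register_cfg_hooks ()
   has been called, a generic call such as

       split_edge (e);

   dispatches to tree_split_edge above, and split_block (bb, stmt)
   dispatches to tree_split_block.  */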

/* Split all critical edges.  */

static void
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	  {
	    split_edge (e);
	  }
    }
}

struct tree_opt_pass pass_split_crit_edges =
{
  "crited",				/* name */
  NULL,					/* gate */
  split_critical_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SPLIT_EDGES,			/* tv_id */
  PROP_cfg,				/* properties required */
  PROP_no_crit_edges,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
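
/* Note (added for clarity): an edge is critical when its source has more
   than one successor and its destination has more than one predecessor,
   as with the edge 1->3 in

       1 ---> 2
        \    /
         v  v
          3

   Code cannot be inserted on 1->3 without affecting the other paths, so
   split_edge places a new empty block in the middle of it; the pass above
   does this for every such non-abnormal edge.  */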

/* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
   a temporary, make sure and register it to be renamed if necessary,
   and finally return the temporary.  Put the statements to compute
   EXP before the current statement in BSI.  */

tree
gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
{
  tree t, new_stmt, orig_stmt;

  if (is_gimple_val (exp))
    return exp;

  t = make_rename_temp (type, NULL);
  new_stmt = build (MODIFY_EXPR, type, t, exp);

  orig_stmt = bsi_stmt (*bsi);
  SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
  TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);

  bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);

  return t;
}
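
/* Usage sketch (added; EXPR and TYPE are hypothetical): if EXP is already
   a GIMPLE value, e.g. a VAR_DECL or an INTEGER_CST, it is returned
   unchanged.  Otherwise the call

       tree val = gimplify_val (&bsi, type, expr);

   emits "tmp = expr;" before the statement at BSI and returns the new
   temporary, which make_rename_temp has registered for SSA renaming.  */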

/* Build a ternary operation and gimplify it.  Emit code before BSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
		 tree type, tree a, tree b, tree c)
{
  tree ret;

  ret = fold (build3 (code, type, a, b, c));
  STRIP_NOPS (ret);

  return gimplify_val (bsi, type, ret);
}

/* Build a binary operation and gimplify it.  Emit code before BSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
		 tree type, tree a, tree b)
{
  tree ret;

  ret = fold (build2 (code, type, a, b));
  STRIP_NOPS (ret);

  return gimplify_val (bsi, type, ret);
}

/* Build a unary operation and gimplify it.  Emit code before BSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
		 tree a)
{
  tree ret;

  ret = fold (build1 (code, type, a));
  STRIP_NOPS (ret);

  return gimplify_val (bsi, type, ret);
}
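
/* Usage sketch (added; operands A, B and TYPE are hypothetical): these
   helpers let a pass build folded GIMPLE expressions in one step, e.g.

       tree sum = gimplify_build2 (&bsi, PLUS_EXPR, type, a, b);
       tree neg = gimplify_build1 (&bsi, NEGATE_EXPR, type, sum);

   Each call folds the tree it builds and, when the result is not already
   a GIMPLE value, emits a temporary assignment before BSI through
   gimplify_val.  */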

/* Emit return warnings.  */

static void
execute_warn_function_return (void)
{
#ifdef USE_MAPPED_LOCATION
  source_location location;
#else
  location_t *locus;
#endif
  tree last;
  edge e;
  edge_iterator ei;

  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
      && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
    warning ("%Jfunction might be possible candidate for "
	     "attribute %<noreturn%>",
	     cfun->decl);

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
#ifdef USE_MAPPED_LOCATION
      location = UNKNOWN_LOCATION;
#else
      locus = NULL;
#endif
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  last = last_stmt (e->src);
	  if (TREE_CODE (last) == RETURN_EXPR
#ifdef USE_MAPPED_LOCATION
	      && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
#else
	      && (locus = EXPR_LOCUS (last)) != NULL)
#endif
	    break;
	}
#ifdef USE_MAPPED_LOCATION
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning ("%H%<noreturn%> function does return", &location);
#else
      if (!locus)
	locus = &cfun->function_end_locus;
      warning ("%H%<noreturn%> function does return", locus);
#endif
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  tree last = last_stmt (e->src);
	  if (TREE_CODE (last) == RETURN_EXPR
	      && TREE_OPERAND (last, 0) == NULL)
	    {
#ifdef USE_MAPPED_LOCATION
	      location = EXPR_LOCATION (last);
	      if (location == UNKNOWN_LOCATION)
		location = cfun->function_end_locus;
	      warning ("%Hcontrol reaches end of non-void function", &location);
#else
	      locus = EXPR_LOCUS (last);
	      if (!locus)
		locus = &cfun->function_end_locus;
	      warning ("%Hcontrol reaches end of non-void function", locus);
#endif
	      break;
	    }
	}
    }
}
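
/* Illustrative note (added; the example source is hypothetical): with
   -Wreturn-type, a function body such as

       int f (int x) { if (x) return 1; }

   can reach EXIT through a value-less RETURN_EXPR, so the second loop
   above reports "control reaches end of non-void function".  A function
   whose decl is marked noreturn (TREE_THIS_VOLATILE) but which still has
   predecessors of EXIT gets the "%<noreturn%> function does return"
   warning instead.  */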

/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
				     edge *true_edge,
				     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
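
/* Usage sketch (added; BB is a hypothetical block ending in a COND_EXPR):

       edge true_edge, false_edge;
       extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   After the call, true_edge is the successor whose flags include
   EDGE_TRUE_VALUE and false_edge the one flagged EDGE_FALSE_VALUE,
   matching the pair of edges created by make_cond_expr_edges.  */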

struct tree_opt_pass pass_warn_function_return =
{
  NULL,					/* name */
  NULL,					/* gate */
  execute_warn_function_return,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};

#include "gt-tree-cfg.h"