/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "cfglayout.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* Mapping of labels to their associated blocks.  This can greatly speed up
   building of the CFG in code with lots of gotos.  */
static GTY(()) varray_type label_to_block_map;
/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;
/* Basic blocks and flowgraphs.  */
static basic_block create_bb (void *, void *, basic_block);
static void create_block_annotation (basic_block);
static void free_blocks_annotations (void);
static void clear_blocks_annotations (void);
static void make_blocks (tree);
static void factor_computed_gotos (void);

static void make_edges (void);
static void make_ctrl_stmt_edges (basic_block);
static void make_exit_edges (basic_block);
static void make_cond_expr_edges (basic_block);
static void make_switch_expr_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static edge tree_redirect_edge_and_branch (edge, basic_block);
static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
static void split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (tree, tree);
static int tree_verify_flow_info (void);
static void tree_make_forwarder_block (edge);
static bool thread_jumps (void);
static bool tree_forwarder_block_p (basic_block);
static void tree_cfg2vcg (FILE *);

/* Flowgraph optimization and cleanup.  */
static void tree_merge_blocks (basic_block, basic_block);
static bool tree_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static bool cleanup_control_flow (void);
static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (tree, tree);
static bool phi_alternatives_equal (basic_block, edge, edge);
/*---------------------------------------------------------------------------
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  TP points to the list of
   statements to be added to the flowgraph.  */

static void
build_tree_cfg (tree *tp)
{
  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Initialize rbi_pool.  */
  alloc_rbi_pool ();

  /* Initialize the basic block array.  */
  init_flow ();
  profile_status = PROFILE_ABSENT;
  n_basic_blocks = 0;
  last_basic_block = 0;
  VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  /* Build a mapping of labels to their associated blocks.  */
  VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
                  "label to block map");

  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  found_computed_goto = 0;
  make_blocks (*tp);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == 0)
    create_empty_bb (ENTRY_BLOCK_PTR);

  create_block_annotation (ENTRY_BLOCK_PTR);
  create_block_annotation (EXIT_BLOCK_PTR);

  /* Adjust the size of the array.  */
  VARRAY_GROW (basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  make_edges ();

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (dump_file)
      {
        tree_cfg2vcg (dump_file);
        dump_end (TDI_vcg, dump_file);
      }
  }

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    dump_tree_cfg (dump_file, dump_flags);
}
static void
execute_build_cfg (void)
{
  build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
}
struct tree_opt_pass pass_build_cfg =
{
  execute_build_cfg,			/* execute */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts,			/* todo_flags_finish */
};
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  tree factored_computed_goto_label = NULL;
  tree factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi = bsi_last (bb);
      tree last;

      if (bsi_end_p (bsi))
        continue;
      last = bsi_stmt (bsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          tree assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (! factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              block_stmt_iterator new_bsi = bsi_start (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label ();
              factored_computed_goto_label
                = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
              bsi_insert_after (&new_bsi, factored_computed_goto_label,
                                BSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
              bsi_insert_after (&new_bsi, factored_computed_goto,
                                BSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = build (MODIFY_EXPR, ptr_type_node,
                              var, GOTO_DESTINATION (last));
          bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          GOTO_DESTINATION (last) = factored_label_decl;
        }
    }
}
/* Create annotations for a single basic block.  */

static void
create_block_annotation (basic_block bb)
{
  /* Verify that the tree_annotations field is clear.  */
  gcc_assert (!bb->tree_annotations);
  bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
}


/* Free the annotations for all the basic blocks.  */

static void free_blocks_annotations (void)
{
  clear_blocks_annotations ();
}


/* Clear the annotations for all the basic blocks.  */

static void
clear_blocks_annotations (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->tree_annotations = NULL;
}
/* Build a flowgraph for the statement_list STMT_LIST.  */

static void
make_blocks (tree stmt_list)
{
  tree_stmt_iterator i = tsi_start (stmt_list);
  tree stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_list = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!tsi_end_p (i))
    {
      tree prev_stmt;

      prev_stmt = stmt;
      stmt = tsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_list)
            stmt_list = tsi_split_statement_list_before (&i);
          bb = create_basic_block (stmt_list, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      set_bb_for_stmt (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        start_new_block = true;

      tsi_next (&i);
      first_stmt_of_list = false;
    }
}
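/* Illustrative example (not from the original sources): for a lowered
   statement list such as

	a = b + 1;
	if (a > 0) goto L1; else goto L2;
	L1:  x = 1;  goto L3;
	L2:  x = 2;
	L3:  return x;

   make_blocks starts a new block at each label (stmt_starts_bb_p) and ends
   the current one after each control statement (stmt_ends_bb_p), yielding
   four basic blocks; the edges between them are added later by make_edges.  */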
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->stmt_list = h ? h : alloc_stmt_list ();

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VARRAY_GROW (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  BASIC_BLOCK (last_basic_block) = bb;

  create_block_annotation (bb);

  n_basic_blocks++;
  last_basic_block++;

  initialize_bb_rbi (bb);

  return bb;
}
/*---------------------------------------------------------------------------
---------------------------------------------------------------------------*/

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  /* Traverse basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      tree first = first_stmt (bb);
      tree last = last_stmt (bb);

      if (first)
        {
          /* Edges for statements that always alter flow control.  */
          if (is_ctrl_stmt (last))
            make_ctrl_stmt_edges (bb);

          /* Edges for statements that sometimes alter flow control.  */
          if (is_ctrl_altering_stmt (last))
            make_exit_edges (bb);
        }

      /* Finally, if no edges were created above, this is a regular
         basic block that only needs a fallthru edge.  */
      if (EDGE_COUNT (bb->succs) == 0)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* We do not care about fake edges, so remove any that the CFG
     builder inserted for completeness.  */
  remove_fake_exit_edges ();

  /* Clean up the graph and warn for unreachable code.  */
  cleanup_tree_cfg ();
}
/* Create edges for control statement at basic block BB.  */

static void
make_ctrl_stmt_edges (basic_block bb)
{
  tree last = last_stmt (bb);

  switch (TREE_CODE (last))
    {
    case GOTO_EXPR:
      make_goto_expr_edges (bb);
      break;

    case RETURN_EXPR:
      make_edge (bb, EXIT_BLOCK_PTR, 0);
      break;

    case COND_EXPR:
      make_cond_expr_edges (bb);
      break;

    case SWITCH_EXPR:
      make_switch_expr_edges (bb);
      break;

    case RESX_EXPR:
      make_eh_edges (last);
      /* Yet another NORETURN hack.  */
      if (EDGE_COUNT (bb->succs) == 0)
        make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Create exit edges for statements in block BB that alter the flow of
   control.  Statements that alter the control flow are 'goto', 'return'
   and calls to non-returning functions.  */

static void
make_exit_edges (basic_block bb)
{
  tree last = last_stmt (bb), op;

  switch (TREE_CODE (last))
    {
    case CALL_EXPR:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (TREE_SIDE_EFFECTS (last)
          && current_function_has_nonlocal_label)
        make_goto_expr_edges (bb);

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* Some calls are known not to return.  For such calls we create
         a fake edge.

         We really need to revamp how we build edges so that it's not
         such a bloody pain to avoid creating edges for this case since
         all we do is remove these edges when we're done building the
         CFG.  */
      if (call_expr_flags (last) & (ECF_NORETURN | ECF_LONGJMP))
        {
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
          return;
        }

      /* Don't forget the fall-thru edge.  */
      make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      break;

    case MODIFY_EXPR:
      /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
         may have an abnormal edge.  Search the RHS for this case and
         create any required edges.  */
      op = get_call_expr_in (last);
      if (op && TREE_SIDE_EFFECTS (op)
          && current_function_has_nonlocal_label)
        make_goto_expr_edges (bb);

      make_eh_edges (last);
      make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Create the edges for a COND_EXPR starting at block BB.
   At this point, both clauses must contain only simple gotos.  */

static void
make_cond_expr_edges (basic_block bb)
{
  tree entry = last_stmt (bb);
  basic_block then_bb, else_bb;
  tree then_label, else_label;

  gcc_assert (TREE_CODE (entry) == COND_EXPR);

  /* Entry basic blocks for each component.  */
  then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
  else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);

  make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  make_edge (bb, else_bb, EDGE_FALSE_VALUE);
}
/* Create the edges for a SWITCH_EXPR starting at block BB.
   At this point, the switch body has been lowered and the
   SWITCH_LABELS filled in, so this is in effect a multi-way branch.  */

static void
make_switch_expr_edges (basic_block bb)
{
  tree entry = last_stmt (bb);
  size_t i, n;
  tree vec;

  vec = SWITCH_LABELS (entry);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
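/* Illustrative example (not from the original sources): for a lowered

	switch (x) { case 1: goto L1; case 2: goto L2; default: goto L3; }

   SWITCH_LABELS holds three CASE_LABEL_EXPRs, so the loop above creates one
   edge per vector element: bb -> L1, bb -> L2 and bb -> L3.  */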
/* Return the basic block holding label DEST.  */

basic_block
label_to_block (tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
      tree stmt;

      stmt = build1 (LABEL_EXPR, void_type_node, dest);
      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  return VARRAY_BB (label_to_block_map, uid);
}
/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  tree goto_t, dest;
  basic_block target_bb;
  int for_call;
  block_stmt_iterator last = bsi_last (bb);

  goto_t = bsi_stmt (last);

  /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
     CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
     from a nonlocal goto.  */
  if (TREE_CODE (goto_t) != GOTO_EXPR)
    {
      dest = error_mark_node;
      for_call = 1;
    }
  else
    {
      dest = GOTO_DESTINATION (goto_t);
      for_call = 0;

      /* A GOTO to a local label creates normal edges.  */
      if (simple_goto_p (goto_t))
        {
          edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
#ifdef USE_MAPPED_LOCATION
          e->goto_locus = EXPR_LOCATION (goto_t);
#else
          e->goto_locus = EXPR_LOCUS (goto_t);
#endif
          return;
        }

      /* Nothing more to do for nonlocal gotos.  */
      if (TREE_CODE (dest) == LABEL_DECL)
        return;

      /* Computed gotos remain.  */
    }

  /* Look for the block starting with the destination label.  In the
     case of a computed goto, make an edge to any label block we find
     in the CFG.  */
  FOR_EACH_BB (target_bb)
    {
      block_stmt_iterator bsi;

      for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          tree target = bsi_stmt (bsi);

          if (TREE_CODE (target) != LABEL_EXPR)
            break;

          if (
              /* Computed GOTOs.  Make an edge to every label block that has
                 been marked as a potential target for a computed goto.  */
              (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
              /* Nonlocal GOTO target.  Make an edge to every label block
                 that has been marked as a potential target for a nonlocal
                 goto.  */
              || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
            make_edge (bb, target_bb, EDGE_ABNORMAL);
        }
    }

  /* Degenerate case of computed goto with no labels.  */
  if (!for_call && EDGE_COUNT (bb->succs) == 0)
    make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}
/*---------------------------------------------------------------------------
---------------------------------------------------------------------------*/

/* Remove unreachable blocks and other miscellaneous clean up work.  */

bool
cleanup_tree_cfg (void)
{
  bool retval = false;

  timevar_push (TV_TREE_CLEANUP_CFG);

  retval = cleanup_control_flow ();
  retval |= delete_unreachable_blocks ();
  retval |= thread_jumps ();

#ifdef ENABLE_CHECKING
  if (retval)
    {
      gcc_assert (!cleanup_control_flow ());
      gcc_assert (!delete_unreachable_blocks ());
      gcc_assert (!thread_jumps ());
    }
#endif

  /* Merging the blocks creates no new opportunities for the other
     optimizations, so do it here.  */

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_TREE_CLEANUP_CFG);

  return retval;
}
/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We only run this pass once, running it more than once is probably not
   profitable.  */

/* A map from basic block index to the leading label of that block.  */
static tree *label_for_bb;
/* Callback for for_each_eh_region.  Helper for cleanup_dead_labels.  */

static void
update_eh_label (struct eh_region *region)
{
  tree old_label = get_eh_region_tree_label (region);
  if (old_label)
    {
      tree new_label;
      basic_block bb = label_to_block (old_label);

      /* ??? After optimizing, there may be EH regions with labels
         that have already been removed from the function body, so
         there is no basic block for them.  */
      if (! bb)
        return;

      new_label = label_for_bb[bb->index];
      set_eh_region_tree_label (region, new_label);
    }
}
/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!label_for_bb[bb->index])
    label_for_bb[bb->index] = label;
  return label_for_bb[bb->index];
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = xcalloc (last_basic_block, sizeof (tree));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree label, stmt = bsi_stmt (i);

          if (TREE_CODE (stmt) != LABEL_EXPR)
            break;

          label = LABEL_EXPR_LABEL (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (! label_for_bb[bb->index])
            {
              label_for_bb[bb->index] = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (! DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index]))
            {
              label_for_bb[bb->index] = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (!stmt)
        continue;

      switch (TREE_CODE (stmt))
        {
        case COND_EXPR:
          {
            tree true_branch, false_branch;

            true_branch = COND_EXPR_THEN (stmt);
            false_branch = COND_EXPR_ELSE (stmt);

            GOTO_DESTINATION (true_branch)
              = main_block_label (GOTO_DESTINATION (true_branch));
            GOTO_DESTINATION (false_branch)
              = main_block_label (GOTO_DESTINATION (false_branch));
            break;
          }

        case SWITCH_EXPR:
          {
            size_t i;
            tree vec = SWITCH_LABELS (stmt);
            size_t n = TREE_VEC_LENGTH (vec);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              CASE_LABEL (TREE_VEC_ELT (vec, i))
                = main_block_label (CASE_LABEL (TREE_VEC_ELT (vec, i)));
            break;
          }

        /* We have to handle GOTO_EXPRs until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GOTO_EXPR:
          if (! computed_goto_p (stmt))
            {
              GOTO_DESTINATION (stmt)
                = main_block_label (GOTO_DESTINATION (stmt));
              break;
            }

        default:
          break;
        }
    }

  for_each_eh_region (update_eh_label);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos are preserved.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index];

      if (! label_for_this_bb)
        continue;

      for (i = bsi_start (bb); !bsi_end_p (i); )
        {
          tree label, stmt = bsi_stmt (i);

          if (TREE_CODE (stmt) != LABEL_EXPR)
            break;

          label = LABEL_EXPR_LABEL (stmt);

          if (label == label_for_this_bb
              || ! DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label))
            bsi_next (&i);
          else
            bsi_remove (&i);
        }
    }

  free (label_for_bb);
}
/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
        {
          tree labels = SWITCH_LABELS (stmt);
          int old_size = TREE_VEC_LENGTH (labels);
          int i, j, new_size = old_size;
          tree default_case = TREE_VEC_ELT (labels, old_size - 1);
          tree default_label;

          /* The default label is always the last case in a switch
             statement after gimplification.  */
          default_label = CASE_LABEL (default_case);

          /* Look for possible opportunities to merge cases.
             Ignore the last element of the label vector because it
             must be the default case.  */
          i = 0;
          while (i < old_size - 1)
            {
              tree base_case, base_label, base_high, type;
              base_case = TREE_VEC_ELT (labels, i);

              gcc_assert (base_case);
              base_label = CASE_LABEL (base_case);

              /* Discard cases that have the same destination as the
                 default case.  */
              if (base_label == default_label)
                {
                  TREE_VEC_ELT (labels, i) = NULL_TREE;
                  i++;
                  new_size--;
                  continue;
                }

              type = TREE_TYPE (CASE_LOW (base_case));
              base_high = CASE_HIGH (base_case) ?
                CASE_HIGH (base_case) : CASE_LOW (base_case);
              i++;

              /* Try to merge case labels.  Break out when we reach the end
                 of the label vector or when we cannot merge the next case
                 label with the current one.  */
              while (i < old_size - 1)
                {
                  tree merge_case = TREE_VEC_ELT (labels, i);
                  tree merge_label = CASE_LABEL (merge_case);
                  tree t = int_const_binop (PLUS_EXPR, base_high,
                                            integer_one_node, 1);

                  /* Merge the cases if they jump to the same place,
                     and their ranges are consecutive.  */
                  if (merge_label == base_label
                      && tree_int_cst_equal (CASE_LOW (merge_case), t))
                    {
                      base_high = CASE_HIGH (merge_case) ?
                        CASE_HIGH (merge_case) : CASE_LOW (merge_case);
                      CASE_HIGH (base_case) = base_high;
                      TREE_VEC_ELT (labels, i) = NULL_TREE;
                      new_size--;
                      i++;
                    }
                  else
                    break;
                }
            }

          /* Compress the case labels in the label vector, and adjust the
             length of the vector.  */
          for (i = 0, j = 0; i < new_size; i++)
            {
              while (! TREE_VEC_ELT (labels, j))
                j++;
              TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
            }

          TREE_VEC_LENGTH (labels) = new_size;
        }
    }
}
/* Checks whether we can merge block B into block A.  */

static bool
tree_can_merge_blocks_p (basic_block a, basic_block b)
{
  tree stmt;
  block_stmt_iterator bsi;

  if (EDGE_COUNT (a->succs) != 1)
    return false;

  if (EDGE_SUCC (a, 0)->flags & EDGE_ABNORMAL)
    return false;

  if (EDGE_SUCC (a, 0)->dest != b)
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  if (EDGE_COUNT (b->preds) > 1)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt && TREE_CODE (stmt) == LABEL_EXPR
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
    return false;

  /* There may be no phi nodes at the start of b.  Most of these degenerate
     phi nodes should be cleaned up by kill_redundant_phi_nodes.  */
  if (phi_nodes (b))
    return false;

  /* Do not remove user labels.  */
  for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      stmt = bsi_stmt (bsi);
      if (TREE_CODE (stmt) != LABEL_EXPR)
        break;
      if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
        return false;
    }

  return true;
}
/* Merge block B into block A.  */

static void
tree_merge_blocks (basic_block a, basic_block b)
{
  block_stmt_iterator bsi;
  tree_stmt_iterator last;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (EDGE_SUCC (a, 0)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set bb_for_stmt to A for other statements.  */
  for (bsi = bsi_start (b); !bsi_end_p (bsi);)
    {
      if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
        bsi_remove (&bsi);
      else
        {
          set_bb_for_stmt (bsi_stmt (bsi), a);
          bsi_next (&bsi);
        }
    }

  /* Merge the chains.  */
  last = tsi_last (a->stmt_list);
  tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
  b->stmt_list = NULL;
}
/* Walk the function tree removing unnecessary statements.

     * Empty statement nodes are removed

     * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed

     * Unnecessary COND_EXPRs are removed

     * Some unnecessary BIND_EXPRs are removed

   Clearly more work could be done.  The trick is doing the analysis
   and removal fast enough to be a net improvement in compile times.

   Note that when we remove a control structure such as a COND_EXPR,
   BIND_EXPR, or TRY block, we will need to repeat this optimization pass
   to ensure we eliminate all the useless code.  */
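/* Illustrative example (not from the original sources): after constant
   propagation the gimplifier may leave

	if (1)
	  x = 1;
	else
	  y = 2;

   where the else arm can never run; the COND_EXPR walker below replaces the
   whole conditional with its then-clause, and the repeat flag makes the pass
   run again so that any BIND_EXPR or TRY wrapper emptied by the change is
   removed as well.  */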
1128 static void remove_useless_stmts_1 (tree
*, struct rus_data
*);
1131 remove_useless_stmts_warn_notreached (tree stmt
)
1133 if (EXPR_HAS_LOCATION (stmt
))
1135 location_t loc
= EXPR_LOCATION (stmt
);
1136 warning ("%Hwill never be executed", &loc
);
1140 switch (TREE_CODE (stmt
))
1142 case STATEMENT_LIST
:
1144 tree_stmt_iterator i
;
1145 for (i
= tsi_start (stmt
); !tsi_end_p (i
); tsi_next (&i
))
1146 if (remove_useless_stmts_warn_notreached (tsi_stmt (i
)))
1152 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt
)))
1154 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt
)))
1156 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt
)))
1160 case TRY_FINALLY_EXPR
:
1161 case TRY_CATCH_EXPR
:
1162 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt
, 0)))
1164 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt
, 1)))
1169 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt
));
1170 case EH_FILTER_EXPR
:
1171 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt
));
1173 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt
));
1176 /* Not a live container. */
1184 remove_useless_stmts_cond (tree
*stmt_p
, struct rus_data
*data
)
1186 tree then_clause
, else_clause
, cond
;
1187 bool save_has_label
, then_has_label
, else_has_label
;
1189 save_has_label
= data
->has_label
;
1190 data
->has_label
= false;
1191 data
->last_goto
= NULL
;
1193 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p
), data
);
1195 then_has_label
= data
->has_label
;
1196 data
->has_label
= false;
1197 data
->last_goto
= NULL
;
1199 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p
), data
);
1201 else_has_label
= data
->has_label
;
1202 data
->has_label
= save_has_label
| then_has_label
| else_has_label
;
1204 then_clause
= COND_EXPR_THEN (*stmt_p
);
1205 else_clause
= COND_EXPR_ELSE (*stmt_p
);
1206 cond
= COND_EXPR_COND (*stmt_p
);
1208 /* If neither arm does anything at all, we can remove the whole IF. */
1209 if (!TREE_SIDE_EFFECTS (then_clause
) && !TREE_SIDE_EFFECTS (else_clause
))
1211 *stmt_p
= build_empty_stmt ();
1212 data
->repeat
= true;
1215 /* If there are no reachable statements in an arm, then we can
1216 zap the entire conditional. */
1217 else if (integer_nonzerop (cond
) && !else_has_label
)
1219 if (warn_notreached
)
1220 remove_useless_stmts_warn_notreached (else_clause
);
1221 *stmt_p
= then_clause
;
1222 data
->repeat
= true;
1224 else if (integer_zerop (cond
) && !then_has_label
)
1226 if (warn_notreached
)
1227 remove_useless_stmts_warn_notreached (then_clause
);
1228 *stmt_p
= else_clause
;
1229 data
->repeat
= true;
1232 /* Check a couple of simple things on then/else with single stmts. */
1235 tree then_stmt
= expr_only (then_clause
);
1236 tree else_stmt
= expr_only (else_clause
);
1238 /* Notice branches to a common destination. */
1239 if (then_stmt
&& else_stmt
1240 && TREE_CODE (then_stmt
) == GOTO_EXPR
1241 && TREE_CODE (else_stmt
) == GOTO_EXPR
1242 && (GOTO_DESTINATION (then_stmt
) == GOTO_DESTINATION (else_stmt
)))
1244 *stmt_p
= then_stmt
;
1245 data
->repeat
= true;
1248 /* If the THEN/ELSE clause merely assigns a value to a variable or
1249 parameter which is already known to contain that value, then
1250 remove the useless THEN/ELSE clause. */
1251 else if (TREE_CODE (cond
) == VAR_DECL
|| TREE_CODE (cond
) == PARM_DECL
)
1254 && TREE_CODE (else_stmt
) == MODIFY_EXPR
1255 && TREE_OPERAND (else_stmt
, 0) == cond
1256 && integer_zerop (TREE_OPERAND (else_stmt
, 1)))
1257 COND_EXPR_ELSE (*stmt_p
) = alloc_stmt_list ();
1259 else if ((TREE_CODE (cond
) == EQ_EXPR
|| TREE_CODE (cond
) == NE_EXPR
)
1260 && (TREE_CODE (TREE_OPERAND (cond
, 0)) == VAR_DECL
1261 || TREE_CODE (TREE_OPERAND (cond
, 0)) == PARM_DECL
)
1262 && TREE_CONSTANT (TREE_OPERAND (cond
, 1)))
1264 tree stmt
= (TREE_CODE (cond
) == EQ_EXPR
1265 ? then_stmt
: else_stmt
);
1266 tree
*location
= (TREE_CODE (cond
) == EQ_EXPR
1267 ? &COND_EXPR_THEN (*stmt_p
)
1268 : &COND_EXPR_ELSE (*stmt_p
));
1271 && TREE_CODE (stmt
) == MODIFY_EXPR
1272 && TREE_OPERAND (stmt
, 0) == TREE_OPERAND (cond
, 0)
1273 && TREE_OPERAND (stmt
, 1) == TREE_OPERAND (cond
, 1))
1274 *location
= alloc_stmt_list ();
1278 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1279 would be re-introduced during lowering. */
1280 data
->last_goto
= NULL
;
1285 remove_useless_stmts_tf (tree
*stmt_p
, struct rus_data
*data
)
1287 bool save_may_branch
, save_may_throw
;
1288 bool this_may_branch
, this_may_throw
;
1290 /* Collect may_branch and may_throw information for the body only. */
1291 save_may_branch
= data
->may_branch
;
1292 save_may_throw
= data
->may_throw
;
1293 data
->may_branch
= false;
1294 data
->may_throw
= false;
1295 data
->last_goto
= NULL
;
1297 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 0), data
);
1299 this_may_branch
= data
->may_branch
;
1300 this_may_throw
= data
->may_throw
;
1301 data
->may_branch
|= save_may_branch
;
1302 data
->may_throw
|= save_may_throw
;
1303 data
->last_goto
= NULL
;
1305 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 1), data
);
1307 /* If the body is empty, then we can emit the FINALLY block without
1308 the enclosing TRY_FINALLY_EXPR. */
1309 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p
, 0)))
1311 *stmt_p
= TREE_OPERAND (*stmt_p
, 1);
1312 data
->repeat
= true;
1315 /* If the handler is empty, then we can emit the TRY block without
1316 the enclosing TRY_FINALLY_EXPR. */
1317 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p
, 1)))
1319 *stmt_p
= TREE_OPERAND (*stmt_p
, 0);
1320 data
->repeat
= true;
1323 /* If the body neither throws, nor branches, then we can safely
1324 string the TRY and FINALLY blocks together. */
1325 else if (!this_may_branch
&& !this_may_throw
)
1327 tree stmt
= *stmt_p
;
1328 *stmt_p
= TREE_OPERAND (stmt
, 0);
1329 append_to_statement_list (TREE_OPERAND (stmt
, 1), stmt_p
);
1330 data
->repeat
= true;
1336 remove_useless_stmts_tc (tree
*stmt_p
, struct rus_data
*data
)
1338 bool save_may_throw
, this_may_throw
;
1339 tree_stmt_iterator i
;
1342 /* Collect may_throw information for the body only. */
1343 save_may_throw
= data
->may_throw
;
1344 data
->may_throw
= false;
1345 data
->last_goto
= NULL
;
1347 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 0), data
);
1349 this_may_throw
= data
->may_throw
;
1350 data
->may_throw
= save_may_throw
;
1352 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1353 if (!this_may_throw
)
1355 if (warn_notreached
)
1356 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p
, 1));
1357 *stmt_p
= TREE_OPERAND (*stmt_p
, 0);
1358 data
->repeat
= true;
1362 /* Process the catch clause specially. We may be able to tell that
1363 no exceptions propagate past this point. */
1365 this_may_throw
= true;
1366 i
= tsi_start (TREE_OPERAND (*stmt_p
, 1));
1367 stmt
= tsi_stmt (i
);
1368 data
->last_goto
= NULL
;
1370 switch (TREE_CODE (stmt
))
1373 for (; !tsi_end_p (i
); tsi_next (&i
))
1375 stmt
= tsi_stmt (i
);
1376 /* If we catch all exceptions, then the body does not
1377 propagate exceptions past this point. */
1378 if (CATCH_TYPES (stmt
) == NULL
)
1379 this_may_throw
= false;
1380 data
->last_goto
= NULL
;
1381 remove_useless_stmts_1 (&CATCH_BODY (stmt
), data
);
1385 case EH_FILTER_EXPR
:
1386 if (EH_FILTER_MUST_NOT_THROW (stmt
))
1387 this_may_throw
= false;
1388 else if (EH_FILTER_TYPES (stmt
) == NULL
)
1389 this_may_throw
= false;
1390 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt
), data
);
1394 /* Otherwise this is a cleanup. */
1395 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 1), data
);
1397 /* If the cleanup is empty, then we can emit the TRY block without
1398 the enclosing TRY_CATCH_EXPR. */
1399 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p
, 1)))
1401 *stmt_p
= TREE_OPERAND (*stmt_p
, 0);
1402 data
->repeat
= true;
1406 data
->may_throw
|= this_may_throw
;
1411 remove_useless_stmts_bind (tree
*stmt_p
, struct rus_data
*data
)
1415 /* First remove anything underneath the BIND_EXPR. */
1416 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p
), data
);
1418 /* If the BIND_EXPR has no variables, then we can pull everything
1419 up one level and remove the BIND_EXPR, unless this is the toplevel
1420 BIND_EXPR for the current function or an inlined function.
1422 When this situation occurs we will want to apply this
1423 optimization again. */
1424 block
= BIND_EXPR_BLOCK (*stmt_p
);
1425 if (BIND_EXPR_VARS (*stmt_p
) == NULL_TREE
1426 && *stmt_p
!= DECL_SAVED_TREE (current_function_decl
)
1428 || ! BLOCK_ABSTRACT_ORIGIN (block
)
1429 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block
))
1432 *stmt_p
= BIND_EXPR_BODY (*stmt_p
);
1433 data
->repeat
= true;
1439 remove_useless_stmts_goto (tree
*stmt_p
, struct rus_data
*data
)
1441 tree dest
= GOTO_DESTINATION (*stmt_p
);
1443 data
->may_branch
= true;
1444 data
->last_goto
= NULL
;
1446 /* Record the last goto expr, so that we can delete it if unnecessary. */
1447 if (TREE_CODE (dest
) == LABEL_DECL
)
1448 data
->last_goto
= stmt_p
;
1453 remove_useless_stmts_label (tree
*stmt_p
, struct rus_data
*data
)
1455 tree label
= LABEL_EXPR_LABEL (*stmt_p
);
1457 data
->has_label
= true;
1459 /* We do want to jump across non-local label receiver code. */
1460 if (DECL_NONLOCAL (label
))
1461 data
->last_goto
= NULL
;
1463 else if (data
->last_goto
&& GOTO_DESTINATION (*data
->last_goto
) == label
)
1465 *data
->last_goto
= build_empty_stmt ();
1466 data
->repeat
= true;
1469 /* ??? Add something here to delete unused labels. */
/* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
   decl.  This allows us to eliminate redundant or useless
   calls to "const" functions.

   Gimplifier already does the same operation, but we may notice functions
   being const and pure once their calls have been gimplified, so we need
   to update the flag.  */

static void
update_call_expr_flags (tree call)
{
  tree decl = get_callee_fndecl (call);
  if (!decl)
    return;
  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
    TREE_SIDE_EFFECTS (call) = 0;
  if (TREE_NOTHROW (decl))
    TREE_NOTHROW (call) = 1;
}
/* T is a CALL_EXPR.  Set current_function_calls_* flags.  */

void
notice_special_calls (tree t)
{
  int flags = call_expr_flags (t);

  if (flags & ECF_MAY_BE_ALLOCA)
    current_function_calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    current_function_calls_setjmp = true;
}
/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  current_function_calls_alloca = false;
  current_function_calls_setjmp = false;
}
1520 remove_useless_stmts_1 (tree
*tp
, struct rus_data
*data
)
1524 switch (TREE_CODE (t
))
1527 remove_useless_stmts_cond (tp
, data
);
1530 case TRY_FINALLY_EXPR
:
1531 remove_useless_stmts_tf (tp
, data
);
1534 case TRY_CATCH_EXPR
:
1535 remove_useless_stmts_tc (tp
, data
);
1539 remove_useless_stmts_bind (tp
, data
);
1543 remove_useless_stmts_goto (tp
, data
);
1547 remove_useless_stmts_label (tp
, data
);
1552 data
->last_goto
= NULL
;
1553 data
->may_branch
= true;
1558 data
->last_goto
= NULL
;
1559 notice_special_calls (t
);
1560 update_call_expr_flags (t
);
1561 if (tree_could_throw_p (t
))
1562 data
->may_throw
= true;
1566 data
->last_goto
= NULL
;
1568 op
= get_call_expr_in (t
);
1571 update_call_expr_flags (op
);
1572 notice_special_calls (op
);
1574 if (tree_could_throw_p (t
))
1575 data
->may_throw
= true;
1578 case STATEMENT_LIST
:
1580 tree_stmt_iterator i
= tsi_start (t
);
1581 while (!tsi_end_p (i
))
1584 if (IS_EMPTY_STMT (t
))
1590 remove_useless_stmts_1 (tsi_stmt_ptr (i
), data
);
1593 if (TREE_CODE (t
) == STATEMENT_LIST
)
1595 tsi_link_before (&i
, t
, TSI_SAME_STMT
);
1605 data
->last_goto
= NULL
;
1609 data
->last_goto
= NULL
;
static void
remove_useless_stmts (void)
{
  struct rus_data data;

  clear_special_calls ();

  do
    {
      memset (&data, 0, sizeof (data));
      remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
    }
  while (data.repeat);
}
struct tree_opt_pass pass_remove_useless_stmts =
{
  "useless",				/* name */
  remove_useless_stmts,			/* execute */
  0,					/* static_pass_number */
  PROP_gimple_any,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
};
1648 /* Remove obviously useless statements in basic block BB. */
1651 cfg_remove_useless_stmts_bb (basic_block bb
)
1653 block_stmt_iterator bsi
;
1654 tree stmt
= NULL_TREE
;
1655 tree cond
, var
= NULL_TREE
, val
= NULL_TREE
;
1656 struct var_ann_d
*ann
;
1658 /* Check whether we come here from a condition, and if so, get the
1660 if (EDGE_COUNT (bb
->preds
) != 1
1661 || !(EDGE_PRED (bb
, 0)->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
1664 cond
= COND_EXPR_COND (last_stmt (EDGE_PRED (bb
, 0)->src
));
1666 if (TREE_CODE (cond
) == VAR_DECL
|| TREE_CODE (cond
) == PARM_DECL
)
1669 val
= (EDGE_PRED (bb
, 0)->flags
& EDGE_FALSE_VALUE
1670 ? boolean_false_node
: boolean_true_node
);
1672 else if (TREE_CODE (cond
) == TRUTH_NOT_EXPR
1673 && (TREE_CODE (TREE_OPERAND (cond
, 0)) == VAR_DECL
1674 || TREE_CODE (TREE_OPERAND (cond
, 0)) == PARM_DECL
))
1676 var
= TREE_OPERAND (cond
, 0);
1677 val
= (EDGE_PRED (bb
, 0)->flags
& EDGE_FALSE_VALUE
1678 ? boolean_true_node
: boolean_false_node
);
1682 if (EDGE_PRED (bb
, 0)->flags
& EDGE_FALSE_VALUE
)
1683 cond
= invert_truthvalue (cond
);
1684 if (TREE_CODE (cond
) == EQ_EXPR
1685 && (TREE_CODE (TREE_OPERAND (cond
, 0)) == VAR_DECL
1686 || TREE_CODE (TREE_OPERAND (cond
, 0)) == PARM_DECL
)
1687 && (TREE_CODE (TREE_OPERAND (cond
, 1)) == VAR_DECL
1688 || TREE_CODE (TREE_OPERAND (cond
, 1)) == PARM_DECL
1689 || TREE_CONSTANT (TREE_OPERAND (cond
, 1))))
1691 var
= TREE_OPERAND (cond
, 0);
1692 val
= TREE_OPERAND (cond
, 1);
1698 /* Only work for normal local variables. */
1699 ann
= var_ann (var
);
1702 || TREE_ADDRESSABLE (var
))
1705 if (! TREE_CONSTANT (val
))
1707 ann
= var_ann (val
);
1710 || TREE_ADDRESSABLE (val
))
1714 /* Ignore floating point variables, since comparison behaves weird for
1716 if (FLOAT_TYPE_P (TREE_TYPE (var
)))
1719 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
);)
1721 stmt
= bsi_stmt (bsi
);
1723 /* If the THEN/ELSE clause merely assigns a value to a variable/parameter
1724 which is already known to contain that value, then remove the useless
1725 THEN/ELSE clause. */
1726 if (TREE_CODE (stmt
) == MODIFY_EXPR
1727 && TREE_OPERAND (stmt
, 0) == var
1728 && operand_equal_p (val
, TREE_OPERAND (stmt
, 1), 0))
1734 /* Invalidate the var if we encounter something that could modify it.
1735 Likewise for the value it was previously set to. Note that we only
1736 consider values that are either a VAR_DECL or PARM_DECL so we
1737 can test for conflict very simply. */
1738 if (TREE_CODE (stmt
) == ASM_EXPR
1739 || (TREE_CODE (stmt
) == MODIFY_EXPR
1740 && (TREE_OPERAND (stmt
, 0) == var
1741 || TREE_OPERAND (stmt
, 0) == val
)))
/* A CFG-aware version of remove_useless_stmts.  */

static void
cfg_remove_useless_stmts (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  FOR_EACH_BB (bb)
    {
      cfg_remove_useless_stmts_bb (bb);
    }
}
/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  tree phi;

  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  phi = phi_nodes (bb);
  while (phi)
    {
      tree next = PHI_CHAIN (phi);
      remove_phi_node (phi, NULL_TREE, bb);
      phi = next;
    }

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    ssa_remove_edge (EDGE_SUCC (bb, 0));
}
1790 /* Remove statements of basic block BB. */
1793 remove_bb (basic_block bb
)
1795 block_stmt_iterator i
;
1796 source_locus loc
= 0;
1800 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
1801 if (dump_flags
& TDF_DETAILS
)
1803 dump_bb (bb
, dump_file
, 0);
1804 fprintf (dump_file
, "\n");
1808 /* Remove all the instructions in the block. */
1809 for (i
= bsi_start (bb
); !bsi_end_p (i
);)
1811 tree stmt
= bsi_stmt (i
);
1812 if (TREE_CODE (stmt
) == LABEL_EXPR
1813 && FORCED_LABEL (LABEL_EXPR_LABEL (stmt
)))
1815 basic_block new_bb
= bb
->prev_bb
;
1816 block_stmt_iterator new_bsi
= bsi_after_labels (new_bb
);
1819 bsi_insert_after (&new_bsi
, stmt
, BSI_NEW_STMT
);
1823 release_defs (stmt
);
1825 set_bb_for_stmt (stmt
, NULL
);
1829 /* Don't warn for removed gotos. Gotos are often removed due to
1830 jump threading, thus resulting in bogus warnings. Not great,
1831 since this way we lose warnings for gotos in the original
1832 program that are indeed unreachable. */
1833 if (TREE_CODE (stmt
) != GOTO_EXPR
&& EXPR_HAS_LOCATION (stmt
) && !loc
)
1834 #ifdef USE_MAPPED_LOCATION
1835 loc
= EXPR_LOCATION (stmt
);
1837 loc
= EXPR_LOCUS (stmt
);
1841 /* If requested, give a warning that the first statement in the
1842 block is unreachable. We walk statements backwards in the
1843 loop above, so the last statement we process is the first statement
1845 if (warn_notreached
&& loc
)
1846 #ifdef USE_MAPPED_LOCATION
1847 warning ("%Hwill never be executed", &loc
);
1849 warning ("%Hwill never be executed", loc
);
1852 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
/* Try to remove superfluous control structures.  */

static bool
cleanup_control_flow (void)
{
  basic_block bb;
  block_stmt_iterator bsi;
  bool retval = false;
  tree stmt;

  FOR_EACH_BB (bb)
    {
      bsi = bsi_last (bb);

      if (bsi_end_p (bsi))
        continue;

      stmt = bsi_stmt (bsi);
      if (TREE_CODE (stmt) == COND_EXPR
          || TREE_CODE (stmt) == SWITCH_EXPR)
        retval |= cleanup_control_expr_graph (bb, bsi);
    }
  return retval;
}
1881 /* Disconnect an unreachable block in the control expression starting
1885 cleanup_control_expr_graph (basic_block bb
, block_stmt_iterator bsi
)
1888 bool retval
= false;
1889 tree expr
= bsi_stmt (bsi
), val
;
1891 if (EDGE_COUNT (bb
->succs
) > 1)
1896 switch (TREE_CODE (expr
))
1899 val
= COND_EXPR_COND (expr
);
1903 val
= SWITCH_COND (expr
);
1904 if (TREE_CODE (val
) != INTEGER_CST
)
1912 taken_edge
= find_taken_edge (bb
, val
);
1916 /* Remove all the edges except the one that is always executed. */
1917 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
1919 if (e
!= taken_edge
)
1921 taken_edge
->probability
+= e
->probability
;
1922 taken_edge
->count
+= e
->count
;
1923 ssa_remove_edge (e
);
1929 if (taken_edge
->probability
> REG_BR_PROB_BASE
)
1930 taken_edge
->probability
= REG_BR_PROB_BASE
;
1933 taken_edge
= EDGE_SUCC (bb
, 0);
1936 taken_edge
->flags
= EDGE_FALLTHRU
;
1938 /* We removed some paths from the cfg. */
1939 free_dominance_info (CDI_DOMINATORS
);
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  tree stmt;

  stmt = last_stmt (bb);

  gcc_assert (is_ctrl_stmt (stmt));

  /* If VAL is a predicate of the form N RELOP N, where N is an
     SSA_NAME, we can usually determine its truth value.  */
  if (COMPARISON_CLASS_P (val))
    val = fold (val);

  /* If VAL is not a constant, we can't determine which edge might
     be taken.  */
  if (!really_constant_p (val))
    return NULL;

  if (TREE_CODE (stmt) == COND_EXPR)
    return find_taken_edge_cond_expr (bb, val);

  if (TREE_CODE (stmt) == SWITCH_EXPR)
    return find_taken_edge_switch_expr (bb, val);

  gcc_unreachable ();
}
/* Given a constant value VAL and the entry block BB to a COND_EXPR
   statement, determine which of the two edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_cond_expr (basic_block bb, tree val)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

  /* Otherwise, try to determine which branch of the if() will be taken.
     If VAL is a constant but it can't be reduced to a 0 or a 1, then
     we don't really know which edge will be taken at runtime.  This
     may happen when comparing addresses (e.g., if (&var1 == 4)).  */
  if (integer_nonzerop (val))
    return true_edge;
  else if (integer_zerop (val))
    return false_edge;
  else
    return NULL;
}
/* Given a constant value VAL and the entry block BB to a SWITCH_EXPR
   statement, determine which edge will be taken out of the block.  Return
   NULL if any edge may be taken.  */

static edge
find_taken_edge_switch_expr (basic_block bb, tree val)
{
  tree switch_expr, taken_case;
  basic_block dest_bb;
  edge e;

  if (TREE_CODE (val) != INTEGER_CST)
    return NULL;

  switch_expr = last_stmt (bb);
  taken_case = find_case_label_for_value (switch_expr, val);
  dest_bb = label_to_block (CASE_LABEL (taken_case));

  e = find_edge (bb, dest_bb);
  gcc_assert (e);
  return e;
}
/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (tree switch_expr, tree val)
{
  tree vec = SWITCH_LABELS (switch_expr);
  size_t low, high, n = TREE_VEC_LENGTH (vec);
  tree default_case = TREE_VEC_ELT (vec, n - 1);

  for (low = -1, high = n - 1; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = TREE_VEC_ELT (vec, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
        high = i;
      else
        low = i;

      if (CASE_HIGH (t) == NULL)
        {
          /* A single-valued case label.  */
          if (cmp == 0)
            return t;
        }
      else
        {
          /* A case range.  We can only handle integer ranges.  */
          if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
            return t;
        }
    }

  return default_case;
}
/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
   those alternatives are equal in each of the PHI nodes, then return
   true, else return false.  */

static bool
phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
  tree phi, val1, val2;
  int n1, n2;

  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
    {
      n1 = phi_arg_from_edge (phi, e1);
      n2 = phi_arg_from_edge (phi, e2);

      gcc_assert (n1 >= 0);
      gcc_assert (n2 >= 0);

      val1 = PHI_ARG_DEF (phi, n1);
      val2 = PHI_ARG_DEF (phi, n2);

      if (!operand_equal_p (val1, val2, 0))
        return false;
    }

  return true;
}
2100 /*---------------------------------------------------------------------------
2102 ---------------------------------------------------------------------------*/
2104 /* Dump tree-specific information of block BB to file OUTF. */
2107 tree_dump_bb (basic_block bb
, FILE *outf
, int indent
)
2109 dump_generic_bb (outf
, bb
, indent
, TDF_VOPS
);
2113 /* Dump a basic block on stderr. */
2116 debug_tree_bb (basic_block bb
)
2118 dump_bb (bb
, stderr
, 0);
2122 /* Dump basic block with index N on stderr. */
2125 debug_tree_bb_n (int n
)
2127 debug_tree_bb (BASIC_BLOCK (n
));
2128 return BASIC_BLOCK (n
);
2132 /* Dump the CFG on stderr.
2134 FLAGS are the same used by the tree dumping functions
2135 (see TDF_* in tree.h). */
2138 debug_tree_cfg (int flags
)
2140 dump_tree_cfg (stderr
, flags
);
2144 /* Dump the program showing basic block boundaries on the given FILE.
2146 FLAGS are the same used by the tree dumping functions (see TDF_* in
2150 dump_tree_cfg (FILE *file
, int flags
)
2152 if (flags
& TDF_DETAILS
)
2154 const char *funcname
2155 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
2158 fprintf (file
, ";; Function %s\n\n", funcname
);
2159 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2160 n_basic_blocks
, n_edges
, last_basic_block
);
2162 brief_dump_cfg (file
);
2163 fprintf (file
, "\n");
2166 if (flags
& TDF_STATS
)
2167 dump_cfg_stats (file
);
2169 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2173 /* Dump CFG statistics on FILE. */
2176 dump_cfg_stats (FILE *file
)
2178 static long max_num_merged_labels
= 0;
2179 unsigned long size
, total
= 0;
2182 const char * const fmt_str
= "%-30s%-13s%12s\n";
2183 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2184 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2185 const char *funcname
2186 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
2189 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2191 fprintf (file
, "---------------------------------------------------------\n");
2192 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2193 fprintf (file
, fmt_str
, "", " instances ", "used ");
2194 fprintf (file
, "---------------------------------------------------------\n");
2196 size
= n_basic_blocks
* sizeof (struct basic_block_def
);
2198 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks
,
2199 SCALE (size
), LABEL (size
));
2203 n_edges
+= EDGE_COUNT (bb
->succs
);
2204 size
= n_edges
* sizeof (struct edge_def
);
2206 fprintf (file
, fmt_str_1
, "Edges", n_edges
, SCALE (size
), LABEL (size
));
2208 size
= n_basic_blocks
* sizeof (struct bb_ann_d
);
2210 fprintf (file
, fmt_str_1
, "Basic block annotations", n_basic_blocks
,
2211 SCALE (size
), LABEL (size
));
2213 fprintf (file
, "---------------------------------------------------------\n");
2214 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2216 fprintf (file
, "---------------------------------------------------------\n");
2217 fprintf (file
, "\n");
2219 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2220 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2222 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2223 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2225 fprintf (file
, "\n");
2229 /* Dump CFG statistics on stderr. Keep extern so that it's always
2230 linked in the final executable. */
2233 debug_cfg_stats (void)
2235 dump_cfg_stats (stderr
);
2239 /* Dump the flowgraph to a .vcg FILE. */
2242 tree_cfg2vcg (FILE *file
)
2247 const char *funcname
2248 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
2250 /* Write the file header. */
2251 fprintf (file
, "graph: { title: \"%s\"\n", funcname
);
2252 fprintf (file
, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2253 fprintf (file
, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2255 /* Write blocks and edges. */
2256 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR
->succs
)
2258 fprintf (file
, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2261 if (e
->flags
& EDGE_FAKE
)
2262 fprintf (file
, " linestyle: dotted priority: 10");
2264 fprintf (file
, " linestyle: solid priority: 100");
2266 fprintf (file
, " }\n");
2272 enum tree_code head_code
, end_code
;
2273 const char *head_name
, *end_name
;
2276 tree first
= first_stmt (bb
);
2277 tree last
= last_stmt (bb
);
2281 head_code
= TREE_CODE (first
);
2282 head_name
= tree_code_name
[head_code
];
2283 head_line
= get_lineno (first
);
2286 head_name
= "no-statement";
2290 end_code
= TREE_CODE (last
);
2291 end_name
= tree_code_name
[end_code
];
2292 end_line
= get_lineno (last
);
2295 end_name
= "no-statement";
2297 fprintf (file
, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2298 bb
->index
, bb
->index
, head_name
, head_line
, end_name
,
2301 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2303 if (e
->dest
== EXIT_BLOCK_PTR
)
2304 fprintf (file
, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb
->index
);
2306 fprintf (file
, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb
->index
, e
->dest
->index
);
2308 if (e
->flags
& EDGE_FAKE
)
2309 fprintf (file
, " priority: 10 linestyle: dotted");
2311 fprintf (file
, " priority: 100 linestyle: solid");
2313 fprintf (file
, " }\n");
2316 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
2320 fputs ("}\n\n", file
);
/*---------------------------------------------------------------------------
			    Miscellaneous helpers
---------------------------------------------------------------------------*/

/* Return true if T represents a stmt that always transfers control.  */

bool
is_ctrl_stmt (tree t)
{
  return (TREE_CODE (t) == COND_EXPR
	  || TREE_CODE (t) == SWITCH_EXPR
	  || TREE_CODE (t) == GOTO_EXPR
	  || TREE_CODE (t) == RETURN_EXPR
	  || TREE_CODE (t) == RESX_EXPR);
}
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (tree t)
{
  tree call;

  call = get_call_expr_in (t);
  if (call)
    {
      /* A non-pure/const CALL_EXPR alters flow control if the current
         function has nonlocal labels.  */
      if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
        return true;

      /* A CALL_EXPR also alters control flow if it does not return.  */
      if (call_expr_flags (call) & (ECF_NORETURN | ECF_LONGJMP))
        return true;
    }

  /* If a statement can throw, it alters control flow.  */
  return tree_can_throw_internal (t);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (tree t)
{
  return (TREE_CODE (t) == GOTO_EXPR
	  && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
}
/* Checks whether EXPR is a simple local goto.  */

bool
simple_goto_p (tree expr)
{
  return (TREE_CODE (expr) == GOTO_EXPR
	  && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
}
/* Return true if T should start a new basic block.  PREV_T is the
   statement preceding T.  It is used when T is a label or a case label.
   Labels should only start a new basic block if their previous statement
   wasn't a label.  Otherwise, sequences of labels would generate
   unnecessary basic blocks that only contain a single label.  */

static inline bool
stmt_starts_bb_p (tree t, tree prev_t)
{
  enum tree_code code;

  if (t == NULL_TREE)
    return false;

  /* LABEL_EXPRs start a new basic block only if the preceding
     statement wasn't a label of the same type.  This prevents the
     creation of consecutive blocks that have nothing but a single
     label.  */
  code = TREE_CODE (t);
  if (code == LABEL_EXPR)
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (code == LABEL_EXPR
          && (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
              || FORCED_LABEL (LABEL_EXPR_LABEL (t))))
        return true;

      if (prev_t && TREE_CODE (prev_t) == code)
        {
          if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
            return true;

          cfg_stats.num_merged_labels++;
          return false;
        }
      else
        return true;
    }

  return false;
}
2432 /* Return true if T should end a basic block. */
2435 stmt_ends_bb_p (tree t
)
2437 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
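
/* A minimal sketch of how the two predicates above delimit blocks
   (a hypothetical helper, not part of this file; kept under "#if 0"):
   count how many basic blocks a flat STATEMENT_LIST would be split into,
   opening a block at the first statement or whenever stmt_starts_bb_p
   says so, and closing it after any statement for which stmt_ends_bb_p
   is true.  */
#if 0
static int
count_blocks_in_stmt_list (tree stmt_list)
{
  tree_stmt_iterator i;
  tree prev = NULL_TREE;
  int n = 0;
  bool open = false;

  for (i = tsi_start (stmt_list); !tsi_end_p (i); tsi_next (&i))
    {
      tree stmt = tsi_stmt (i);

      if (!open || stmt_starts_bb_p (stmt, prev))
        {
          n++;
          open = true;
        }
      if (stmt_ends_bb_p (stmt))
        open = false;

      prev = stmt;
    }

  return n;
}
#endif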
/* Add gotos that used to be represented implicitly in the CFG.  */

void
disband_implicit_edges (void)
{
  basic_block bb;
  block_stmt_iterator last;
  edge e;
  edge_iterator ei;
  tree stmt, label;

  FOR_EACH_BB (bb)
    {
      last = bsi_last (bb);
      stmt = last_stmt (bb);

      if (stmt && TREE_CODE (stmt) == COND_EXPR)
        {
          /* Remove superfluous gotos from COND_EXPR branches.  Moved
             from cfg_remove_useless_stmts here since it violates the
             invariants for tree--cfg correspondence and thus fits better
             here where we do it anyway.  */
          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              if (e->dest != bb->next_bb)
                continue;

              if (e->flags & EDGE_TRUE_VALUE)
                COND_EXPR_THEN (stmt) = build_empty_stmt ();
              else if (e->flags & EDGE_FALSE_VALUE)
                COND_EXPR_ELSE (stmt) = build_empty_stmt ();
              else
                gcc_unreachable ();
              e->flags |= EDGE_FALLTHRU;
              break;
            }
        }

      if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
        {
          /* Remove the RETURN_EXPR if we may fall through to the exit
             instead.  */
          gcc_assert (EDGE_COUNT (bb->succs) == 1);
          gcc_assert (EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR);

          if (bb->next_bb == EXIT_BLOCK_PTR
              && !TREE_OPERAND (stmt, 0))
            {
              bsi_remove (&last);
              EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
            }
          continue;
        }

      /* There can be no fallthru edge if the last statement is a control
         one.  */
      if (stmt && is_ctrl_stmt (stmt))
        continue;

      /* Find a fallthru edge and emit the goto if necessary.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          break;

      if (!e || e->dest == bb->next_bb)
        continue;

      gcc_assert (e->dest != EXIT_BLOCK_PTR);
      label = tree_block_label (e->dest);

      stmt = build1 (GOTO_EXPR, void_type_node, label);
#ifdef USE_MAPPED_LOCATION
      SET_EXPR_LOCATION (stmt, e->goto_locus);
#else
      SET_EXPR_LOCUS (stmt, e->goto_locus);
#endif
      bsi_insert_after (&last, stmt, BSI_NEW_STMT);
      e->flags &= ~EDGE_FALLTHRU;
    }
}
/* Remove block annotations and other data structures.  */

void
delete_tree_cfg_annotations (void)
{
  if (n_basic_blocks > 0)
    free_blocks_annotations ();

  label_to_block_map = NULL;
}
/* Return the first statement in basic block BB.  */

tree
first_stmt (basic_block bb)
{
  block_stmt_iterator i = bsi_start (bb);
  return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
}

/* Return the last statement in basic block BB.  */

tree
last_stmt (basic_block bb)
{
  block_stmt_iterator b = bsi_last (bb);
  return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
}

/* Return a pointer to the last statement in block BB.  */

tree *
last_stmt_ptr (basic_block bb)
{
  block_stmt_iterator last = bsi_last (bb);
  return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
}
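
/* A tiny usage sketch of the accessors above (a hypothetical helper, not
   part of this file; kept under "#if 0"): return the COND_EXPR that ends
   BB, or NULL_TREE if BB is empty or does not end in a two-way branch.  */
#if 0
static tree
bb_cond_stmt (basic_block bb)
{
  tree stmt = last_stmt (bb);   /* NULL_TREE for an empty block.  */

  return (stmt && TREE_CODE (stmt) == COND_EXPR) ? stmt : NULL_TREE;
}
#endif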
/* Return the last statement of an otherwise empty block.  Return NULL
   if the block is totally empty, or if it contains more than one
   statement.  */

tree
last_and_only_stmt (basic_block bb)
{
  block_stmt_iterator i = bsi_last (bb);
  tree last, prev;

  if (bsi_end_p (i))
    return NULL_TREE;

  last = bsi_stmt (i);
  bsi_prev (&i);
  if (bsi_end_p (i))
    return last;

  /* Empty statements should no longer appear in the instruction stream.
     Everything that might have appeared before should be deleted by
     remove_useless_stmts, and the optimizers should just bsi_remove
     instead of smashing with build_empty_stmt.

     Thus the only thing that should appear here in a block containing
     one executable statement is a label.  */
  prev = bsi_stmt (i);
  if (TREE_CODE (prev) == LABEL_EXPR)
    return last;
  else
    return NULL_TREE;
}
/* Mark BB as the basic block holding statement T.  */

void
set_bb_for_stmt (tree t, basic_block bb)
{
  if (TREE_CODE (t) == PHI_NODE)
    PHI_BB (t) = bb;
  else if (TREE_CODE (t) == STATEMENT_LIST)
    {
      tree_stmt_iterator i;
      for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
        set_bb_for_stmt (tsi_stmt (i), bb);
    }
  else
    {
      stmt_ann_t ann = get_stmt_ann (t);
      ann->bb = bb;

      /* If the statement is a label, add the label to block-to-labels map
         so that we can speed up edge creation for GOTO_EXPRs.  */
      if (TREE_CODE (t) == LABEL_EXPR)
        {
          int uid;

          t = LABEL_EXPR_LABEL (t);
          uid = LABEL_DECL_UID (t);
          if (uid == -1)
            {
              LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
              if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
                VARRAY_GROW (label_to_block_map, 3 * uid / 2);
            }
          else
            /* We're moving an existing label.  Make sure that we've
               removed it from the old block.  */
            gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));

          VARRAY_BB (label_to_block_map, uid) = bb;
        }
    }
}
/* Finds the iterator for STMT.  */

extern block_stmt_iterator
bsi_for_stmt (tree stmt)
{
  block_stmt_iterator bsi;

  for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
    if (bsi_stmt (bsi) == stmt)
      return bsi;

  gcc_unreachable ();
}
/* Insert statement (or statement list) T before the statement
   pointed-to by iterator I.  M specifies how to update iterator I
   after insertion (see enum bsi_iterator_update).  */

void
bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  set_bb_for_stmt (t, i->bb);
  tsi_link_before (&i->tsi, t, m);
}

/* Insert statement (or statement list) T after the statement
   pointed-to by iterator I.  M specifies how to update iterator I
   after insertion (see enum bsi_iterator_update).  */

void
bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  set_bb_for_stmt (t, i->bb);
  tsi_link_after (&i->tsi, t, m);
}

/* Remove the statement pointed to by iterator I.  The iterator is updated
   to the next statement.  */

void
bsi_remove (block_stmt_iterator *i)
{
  tree t = bsi_stmt (*i);
  set_bb_for_stmt (t, NULL);
  tsi_delink (&i->tsi);
}
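
/* A minimal sketch of safe in-place removal with the iterator above
   (a hypothetical helper, not part of this file; kept under "#if 0"):
   because bsi_remove already advances the iterator to the next statement,
   the loop must call bsi_next only when nothing was removed.  */
#if 0
static void
remove_discardable_stmts (basic_block bb)
{
  block_stmt_iterator bsi = bsi_start (bb);

  while (!bsi_end_p (bsi))
    {
      /* discard_stmt_p is a hypothetical predicate supplied by the
         caller; it is not defined in this file.  */
      if (discard_stmt_p (bsi_stmt (bsi)))
        bsi_remove (&bsi);      /* Iterator now points at the next stmt.  */
      else
        bsi_next (&bsi);
    }
}
#endif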
/* Move the statement at FROM so it comes right after the statement at TO.  */

void
bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
{
  tree stmt = bsi_stmt (*from);
  bsi_remove (from);
  bsi_insert_after (to, stmt, BSI_SAME_STMT);
}

/* Move the statement at FROM so it comes right before the statement at TO.  */

void
bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
{
  tree stmt = bsi_stmt (*from);
  bsi_remove (from);
  bsi_insert_before (to, stmt, BSI_SAME_STMT);
}

/* Move the statement at FROM to the end of basic block BB.  */

void
bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
{
  block_stmt_iterator last = bsi_last (bb);

  /* Have to check bsi_end_p because it could be an empty block.  */
  if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
    bsi_move_before (from, &last);
  else
    bsi_move_after (from, &last);
}
/* Replace the contents of the statement pointed to by iterator BSI
   with STMT.  If PRESERVE_EH_INFO is true, the exception handling
   information of the original statement is preserved.  */

void
bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
{
  int eh_region;
  tree orig_stmt = bsi_stmt (*bsi);

  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
  set_bb_for_stmt (stmt, bsi->bb);

  /* Preserve EH region information from the original statement, if
     requested by the caller.  */
  if (preserve_eh_info)
    {
      eh_region = lookup_stmt_eh_region (orig_stmt);
      if (eh_region >= 0)
        add_stmt_to_eh_region (stmt, eh_region);
    }

  *bsi_stmt_ptr (*bsi) = stmt;
}
2759 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2760 is made to place the statement in an existing basic block, but
2761 sometimes that isn't possible. When it isn't possible, the edge is
2762 split and the statement is added to the new block.
2764 In all cases, the returned *BSI points to the correct location. The
2765 return value is true if insertion should be done after the location,
2766 or false if it should be done before the location. If new basic block
2767 has to be created, it is stored in *NEW_BB. */
2770 tree_find_edge_insert_loc (edge e
, block_stmt_iterator
*bsi
,
2771 basic_block
*new_bb
)
2773 basic_block dest
, src
;
2779 /* If the destination has one predecessor which has no PHI nodes,
2780 insert there. Except for the exit block.
2782 The requirement for no PHI nodes could be relaxed. Basically we
2783 would have to examine the PHIs to prove that none of them used
2784 the value set by the statement we want to insert on E. That
2785 hardly seems worth the effort. */
2786 if (EDGE_COUNT (dest
->preds
) == 1
2787 && ! phi_nodes (dest
)
2788 && dest
!= EXIT_BLOCK_PTR
)
2790 *bsi
= bsi_start (dest
);
2791 if (bsi_end_p (*bsi
))
2794 /* Make sure we insert after any leading labels. */
2795 tmp
= bsi_stmt (*bsi
);
2796 while (TREE_CODE (tmp
) == LABEL_EXPR
)
2799 if (bsi_end_p (*bsi
))
2801 tmp
= bsi_stmt (*bsi
);
2804 if (bsi_end_p (*bsi
))
2806 *bsi
= bsi_last (dest
);
2813 /* If the source has one successor, the edge is not abnormal and
2814 the last statement does not end a basic block, insert there.
2815 Except for the entry block. */
2817 if ((e
->flags
& EDGE_ABNORMAL
) == 0
2818 && EDGE_COUNT (src
->succs
) == 1
2819 && src
!= ENTRY_BLOCK_PTR
)
2821 *bsi
= bsi_last (src
);
2822 if (bsi_end_p (*bsi
))
2825 tmp
= bsi_stmt (*bsi
);
2826 if (!stmt_ends_bb_p (tmp
))
2829 /* Insert code just before returning the value. We may need to decompose
2830 the return in the case it contains non-trivial operand. */
2831 if (TREE_CODE (tmp
) == RETURN_EXPR
)
2833 tree op
= TREE_OPERAND (tmp
, 0);
2834 if (!is_gimple_val (op
))
2836 gcc_assert (TREE_CODE (op
) == MODIFY_EXPR
);
2837 bsi_insert_before (bsi
, op
, BSI_NEW_STMT
);
2838 TREE_OPERAND (tmp
, 0) = TREE_OPERAND (op
, 0);
2845 /* Otherwise, create a new basic block, and split this edge. */
2846 dest
= split_edge (e
);
2849 e
= EDGE_PRED (dest
, 0);
/* This routine will commit all pending edge insertions, creating any new
   basic blocks which are necessary.

   If specified, NEW_BLOCKS returns a count of the number of new basic
   blocks which were created.  */

void
bsi_commit_edge_inserts (int *new_blocks)
{
  basic_block bb;
  edge e;
  edge_iterator ei;
  int blocks;

  blocks = n_basic_blocks;

  bsi_commit_one_edge_insert (EDGE_SUCC (ENTRY_BLOCK_PTR, 0), NULL);

  FOR_EACH_BB (bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      bsi_commit_one_edge_insert (e, NULL);

  if (new_blocks)
    *new_blocks = n_basic_blocks - blocks;
}
/* Commit insertions pending at edge E.  If a new block is created, set NEW_BB
   to this block, otherwise set it to NULL.  */

void
bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
{
  if (new_bb)
    *new_bb = NULL;

  if (PENDING_STMT (e))
    {
      block_stmt_iterator bsi;
      tree stmt = PENDING_STMT (e);

      PENDING_STMT (e) = NULL_TREE;

      if (tree_find_edge_insert_loc (e, &bsi, new_bb))
        bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
      else
        bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
    }
}

/* Add STMT to the pending list of edge E.  No actual insertion is
   made until a call to bsi_commit_edge_inserts () is made.  */

void
bsi_insert_on_edge (edge e, tree stmt)
{
  append_to_statement_list (stmt, &PENDING_STMT (e));
}

/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts.  If a new block
   has to be created, it is returned.  */

basic_block
bsi_insert_on_edge_immediate (edge e, tree stmt)
{
  block_stmt_iterator bsi;
  basic_block new_bb = NULL;

  gcc_assert (!PENDING_STMT (e));

  if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
    bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
  else
    bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);

  return new_bb;
}
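
/* A minimal sketch of the pending-insertion workflow above (a hypothetical
   helper, not part of this file; kept under "#if 0"): queue a statement on
   every outgoing edge of BB and then commit all pending insertions at once,
   letting tree_find_edge_insert_loc decide per edge whether the statement
   fits an existing block or the edge must be split.  */
#if 0
static void
pad_outgoing_edges (basic_block bb)
{
  edge e;
  edge_iterator ei;
  int new_blocks = 0;

  FOR_EACH_EDGE (e, ei, bb->succs)
    bsi_insert_on_edge (e, build_empty_stmt ());

  /* This commits pending insertions on *all* edges in the function.  */
  bsi_commit_edge_inserts (&new_blocks);
}
#endif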
2932 /*---------------------------------------------------------------------------
2933 Tree specific functions for CFG manipulation
2934 ---------------------------------------------------------------------------*/
2936 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2937 Abort on abnormal edges. */
2940 tree_split_edge (edge edge_in
)
2942 basic_block new_bb
, after_bb
, dest
, src
;
2948 /* Abnormal edges cannot be split. */
2949 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2952 dest
= edge_in
->dest
;
2954 /* Place the new block in the block list. Try to keep the new block
2955 near its "logical" location. This is of most help to humans looking
2956 at debugging dumps. */
2957 FOR_EACH_EDGE (e
, ei
, dest
->preds
)
2958 if (e
->src
->next_bb
== dest
)
2961 after_bb
= dest
->prev_bb
;
2963 after_bb
= edge_in
->src
;
2965 new_bb
= create_empty_bb (after_bb
);
2966 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2967 new_bb
->count
= edge_in
->count
;
2968 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2969 new_edge
->probability
= REG_BR_PROB_BASE
;
2970 new_edge
->count
= edge_in
->count
;
2972 /* Find all the PHI arguments on the original edge, and change them to
2973 the new edge. Do it before redirection, so that the argument does not
2975 for (phi
= phi_nodes (dest
); phi
; phi
= PHI_CHAIN (phi
))
2977 num_elem
= PHI_NUM_ARGS (phi
);
2978 for (i
= 0; i
< num_elem
; i
++)
2979 if (PHI_ARG_EDGE (phi
, i
) == edge_in
)
2981 PHI_ARG_EDGE (phi
, i
) = new_edge
;
2986 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2988 gcc_assert (!PENDING_STMT (edge_in
));
/* Return true when BB has label LABEL in it.  */

static bool
has_label_p (basic_block bb, tree label)
{
  block_stmt_iterator bsi;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);

      if (TREE_CODE (stmt) != LABEL_EXPR)
        return false;
      if (LABEL_EXPR_LABEL (stmt) == label)
        return true;
    }
  return false;
}
3014 /* Callback for walk_tree, check that all elements with address taken are
3015 properly noticed as such. */
3018 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
3025 /* Check operand N for being valid GIMPLE and give error MSG if not.
3026 We check for constants explicitly since they are not considered
3027 gimple invariants if they overflowed. */
3028 #define CHECK_OP(N, MSG) \
3029 do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N)) \
3030 && !is_gimple_val (TREE_OPERAND (t, N))) \
3031 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3033 switch (TREE_CODE (t
))
3036 if (SSA_NAME_IN_FREE_LIST (t
))
3038 error ("SSA name in freelist but still referenced");
3044 x
= TREE_OPERAND (t
, 0);
3045 if (TREE_CODE (x
) == BIT_FIELD_REF
3046 && is_gimple_reg (TREE_OPERAND (x
, 0)))
3048 error ("GIMPLE register modified with BIT_FIELD_REF");
3054 /* Skip any references (they will be checked when we recurse down the
3055 tree) and ensure that any variable used as a prefix is marked
3057 for (x
= TREE_OPERAND (t
, 0);
3058 (handled_component_p (x
)
3059 || TREE_CODE (x
) == REALPART_EXPR
3060 || TREE_CODE (x
) == IMAGPART_EXPR
);
3061 x
= TREE_OPERAND (x
, 0))
3064 if (TREE_CODE (x
) != VAR_DECL
&& TREE_CODE (x
) != PARM_DECL
)
3066 if (!TREE_ADDRESSABLE (x
))
3068 error ("address taken, but ADDRESSABLE bit not set");
3074 x
= TREE_OPERAND (t
, 0);
3075 if (TREE_CODE (TREE_TYPE (x
)) != BOOLEAN_TYPE
)
3077 error ("non-boolean used in condition");
3084 case FIX_TRUNC_EXPR
:
3086 case FIX_FLOOR_EXPR
:
3087 case FIX_ROUND_EXPR
:
3092 case NON_LVALUE_EXPR
:
3093 case TRUTH_NOT_EXPR
:
3094 CHECK_OP (0, "Invalid operand to unary operator");
3101 case ARRAY_RANGE_REF
:
3103 case VIEW_CONVERT_EXPR
:
3104 /* We have a nest of references. Verify that each of the operands
3105 that determine where to reference is either a constant or a variable,
3106 verify that the base is valid, and then show we've already checked
3108 while (TREE_CODE (t
) == REALPART_EXPR
|| TREE_CODE (t
) == IMAGPART_EXPR
3109 || handled_component_p (t
))
3111 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
3112 CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
3113 else if (TREE_CODE (t
) == ARRAY_REF
3114 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
3116 CHECK_OP (1, "Invalid array index.");
3117 if (TREE_OPERAND (t
, 2))
3118 CHECK_OP (2, "Invalid array lower bound.");
3119 if (TREE_OPERAND (t
, 3))
3120 CHECK_OP (3, "Invalid array stride.");
3122 else if (TREE_CODE (t
) == BIT_FIELD_REF
)
3124 CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
3125 CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
3128 t
= TREE_OPERAND (t
, 0);
3131 if (!CONSTANT_CLASS_P (t
) && !is_gimple_lvalue (t
))
3133 error ("Invalid reference prefix.");
3145 case UNORDERED_EXPR
:
3156 case TRUNC_DIV_EXPR
:
3158 case FLOOR_DIV_EXPR
:
3159 case ROUND_DIV_EXPR
:
3160 case TRUNC_MOD_EXPR
:
3162 case FLOOR_MOD_EXPR
:
3163 case ROUND_MOD_EXPR
:
3165 case EXACT_DIV_EXPR
:
3175 CHECK_OP (0, "Invalid operand to binary operator");
3176 CHECK_OP (1, "Invalid operand to binary operator");
3188 /* Verify STMT, return true if STMT is not in GIMPLE form.
3189 TODO: Implement type checking. */
3192 verify_stmt (tree stmt
, bool last_in_block
)
3196 if (!is_gimple_stmt (stmt
))
3198 error ("Is not a valid GIMPLE statement.");
3202 addr
= walk_tree (&stmt
, verify_expr
, NULL
, NULL
);
3205 debug_generic_stmt (addr
);
3209 /* If the statement is marked as part of an EH region, then it is
3210 expected that the statement could throw. Verify that when we
3211 have optimizations that simplify statements such that we prove
3212 that they cannot throw, that we update other data structures
3214 if (lookup_stmt_eh_region (stmt
) >= 0)
3216 if (!tree_could_throw_p (stmt
))
3218 error ("Statement marked for throw, but doesn%'t.");
3221 if (!last_in_block
&& tree_can_throw_internal (stmt
))
3223 error ("Statement marked for throw in middle of block.");
3231 debug_generic_stmt (stmt
);
3236 /* Return true when the T can be shared. */
3239 tree_node_can_be_shared (tree t
)
3241 if (IS_TYPE_OR_DECL_P (t
)
3242 /* We check for constants explicitly since they are not considered
3243 gimple invariants if they overflowed. */
3244 || CONSTANT_CLASS_P (t
)
3245 || is_gimple_min_invariant (t
)
3246 || TREE_CODE (t
) == SSA_NAME
)
3249 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3250 /* We check for constants explicitly since they are not considered
3251 gimple invariants if they overflowed. */
3252 && (CONSTANT_CLASS_P (TREE_OPERAND (t
, 1))
3253 || is_gimple_min_invariant (TREE_OPERAND (t
, 1))))
3254 || (TREE_CODE (t
) == COMPONENT_REF
3255 || TREE_CODE (t
) == REALPART_EXPR
3256 || TREE_CODE (t
) == IMAGPART_EXPR
))
3257 t
= TREE_OPERAND (t
, 0);
3266 /* Called via walk_trees. Verify tree sharing. */
3269 verify_node_sharing (tree
* tp
, int *walk_subtrees
, void *data
)
3271 htab_t htab
= (htab_t
) data
;
3274 if (tree_node_can_be_shared (*tp
))
3276 *walk_subtrees
= false;
3280 slot
= htab_find_slot (htab
, *tp
, INSERT
);
3289 /* Verify the GIMPLE statement chain. */
3295 block_stmt_iterator bsi
;
3300 timevar_push (TV_TREE_STMT_VERIFY
);
3301 htab
= htab_create (37, htab_hash_pointer
, htab_eq_pointer
, NULL
);
3308 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
3310 int phi_num_args
= PHI_NUM_ARGS (phi
);
3312 for (i
= 0; i
< phi_num_args
; i
++)
3314 tree t
= PHI_ARG_DEF (phi
, i
);
3317 /* Addressable variables do have SSA_NAMEs but they
3318 are not considered gimple values. */
3319 if (TREE_CODE (t
) != SSA_NAME
3320 && TREE_CODE (t
) != FUNCTION_DECL
3321 && !is_gimple_val (t
))
3323 error ("PHI def is not a GIMPLE value");
3324 debug_generic_stmt (phi
);
3325 debug_generic_stmt (t
);
3329 addr
= walk_tree (&t
, verify_expr
, NULL
, NULL
);
3332 debug_generic_stmt (addr
);
3336 addr
= walk_tree (&t
, verify_node_sharing
, htab
, NULL
);
3339 error ("Incorrect sharing of tree nodes");
3340 debug_generic_stmt (phi
);
3341 debug_generic_stmt (addr
);
3347 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); )
3349 tree stmt
= bsi_stmt (bsi
);
3351 err
|= verify_stmt (stmt
, bsi_end_p (bsi
));
3352 addr
= walk_tree (&stmt
, verify_node_sharing
, htab
, NULL
);
3355 error ("Incorrect sharing of tree nodes");
3356 debug_generic_stmt (stmt
);
3357 debug_generic_stmt (addr
);
3364 internal_error ("verify_stmts failed.");
3367 timevar_pop (TV_TREE_STMT_VERIFY
);
3371 /* Verifies that the flow information is OK. */
3374 tree_verify_flow_info (void)
3378 block_stmt_iterator bsi
;
3383 if (ENTRY_BLOCK_PTR
->stmt_list
)
3385 error ("ENTRY_BLOCK has a statement list associated with it\n");
3389 if (EXIT_BLOCK_PTR
->stmt_list
)
3391 error ("EXIT_BLOCK has a statement list associated with it\n");
3395 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
3396 if (e
->flags
& EDGE_FALLTHRU
)
3398 error ("Fallthru to exit from bb %d\n", e
->src
->index
);
3404 bool found_ctrl_stmt
= false;
3406 /* Skip labels on the start of basic block. */
3407 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
3409 if (TREE_CODE (bsi_stmt (bsi
)) != LABEL_EXPR
)
3412 if (label_to_block (LABEL_EXPR_LABEL (bsi_stmt (bsi
))) != bb
)
3414 tree stmt
= bsi_stmt (bsi
);
3415 error ("Label %s to block does not match in bb %d\n",
3416 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt
))),
3421 if (decl_function_context (LABEL_EXPR_LABEL (bsi_stmt (bsi
)))
3422 != current_function_decl
)
3424 tree stmt
= bsi_stmt (bsi
);
3425 error ("Label %s has incorrect context in bb %d\n",
3426 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt
))),
3432 /* Verify that body of basic block BB is free of control flow. */
3433 for (; !bsi_end_p (bsi
); bsi_next (&bsi
))
3435 tree stmt
= bsi_stmt (bsi
);
3437 if (found_ctrl_stmt
)
3439 error ("Control flow in the middle of basic block %d\n",
3444 if (stmt_ends_bb_p (stmt
))
3445 found_ctrl_stmt
= true;
3447 if (TREE_CODE (stmt
) == LABEL_EXPR
)
3449 error ("Label %s in the middle of basic block %d\n",
3450 IDENTIFIER_POINTER (DECL_NAME (stmt
)),
3455 bsi
= bsi_last (bb
);
3456 if (bsi_end_p (bsi
))
3459 stmt
= bsi_stmt (bsi
);
3461 if (is_ctrl_stmt (stmt
))
3463 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3464 if (e
->flags
& EDGE_FALLTHRU
)
3466 error ("Fallthru edge after a control statement in bb %d \n",
3472 switch (TREE_CODE (stmt
))
3478 if (TREE_CODE (COND_EXPR_THEN (stmt
)) != GOTO_EXPR
3479 || TREE_CODE (COND_EXPR_ELSE (stmt
)) != GOTO_EXPR
)
3481 error ("Structured COND_EXPR at the end of bb %d\n", bb
->index
);
3485 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
3487 if (!true_edge
|| !false_edge
3488 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
3489 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
3490 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
3491 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
3492 || EDGE_COUNT (bb
->succs
) >= 3)
3494 error ("Wrong outgoing edge flags at end of bb %d\n",
3499 if (!has_label_p (true_edge
->dest
,
3500 GOTO_DESTINATION (COND_EXPR_THEN (stmt
))))
3502 error ("%<then%> label does not match edge at end of bb %d\n",
3507 if (!has_label_p (false_edge
->dest
,
3508 GOTO_DESTINATION (COND_EXPR_ELSE (stmt
))))
3510 error ("%<else%> label does not match edge at end of bb %d\n",
3518 if (simple_goto_p (stmt
))
3520 error ("Explicit goto at end of bb %d\n", bb
->index
);
3525 /* FIXME. We should double check that the labels in the
3526 destination blocks have their address taken. */
3527 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3528 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
3529 | EDGE_FALSE_VALUE
))
3530 || !(e
->flags
& EDGE_ABNORMAL
))
3532 error ("Wrong outgoing edge flags at end of bb %d\n",
3540 if (EDGE_COUNT (bb
->succs
) != 1
3541 || (EDGE_SUCC (bb
, 0)->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
3542 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
3544 error ("Wrong outgoing edge flags at end of bb %d\n", bb
->index
);
3547 if (EDGE_SUCC (bb
, 0)->dest
!= EXIT_BLOCK_PTR
)
3549 error ("Return edge does not point to exit in bb %d\n",
3562 vec
= SWITCH_LABELS (stmt
);
3563 n
= TREE_VEC_LENGTH (vec
);
3565 /* Mark all the destination basic blocks. */
3566 for (i
= 0; i
< n
; ++i
)
3568 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
3569 basic_block label_bb
= label_to_block (lab
);
3571 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
3572 label_bb
->aux
= (void *)1;
3575 /* Verify that the case labels are sorted. */
3576 prev
= TREE_VEC_ELT (vec
, 0);
3577 for (i
= 1; i
< n
- 1; ++i
)
3579 tree c
= TREE_VEC_ELT (vec
, i
);
3582 error ("Found default case not at end of case vector");
3586 if (! tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
3588 error ("Case labels not sorted:\n ");
3589 print_generic_expr (stderr
, prev
, 0);
3590 fprintf (stderr
," is greater than ");
3591 print_generic_expr (stderr
, c
, 0);
3592 fprintf (stderr
," but comes before it.\n");
3597 if (CASE_LOW (TREE_VEC_ELT (vec
, n
- 1)))
3599 error ("No default case found at end of case vector");
3603 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3607 error ("Extra outgoing edge %d->%d\n",
3608 bb
->index
, e
->dest
->index
);
3611 e
->dest
->aux
= (void *)2;
3612 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
3613 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
3615 error ("Wrong outgoing edge flags at end of bb %d\n",
3621 /* Check that we have all of them. */
3622 for (i
= 0; i
< n
; ++i
)
3624 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
3625 basic_block label_bb
= label_to_block (lab
);
3627 if (label_bb
->aux
!= (void *)2)
3629 error ("Missing edge %i->%i\n",
3630 bb
->index
, label_bb
->index
);
3635 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3636 e
->dest
->aux
= (void *)0;
3643 if (dom_computed
[CDI_DOMINATORS
] >= DOM_NO_FAST_QUERY
)
3644 verify_dominators (CDI_DOMINATORS
);
3650 /* Updates phi nodes after creating a forwarder block joined
3651 by edge FALLTHRU. */
3654 tree_make_forwarder_block (edge fallthru
)
3658 basic_block dummy
, bb
;
3659 tree phi
, new_phi
, var
;
3661 dummy
= fallthru
->src
;
3662 bb
= fallthru
->dest
;
3664 if (EDGE_COUNT (bb
->preds
) == 1)
3667 /* If we redirected a branch we must create new phi nodes at the
3669 for (phi
= phi_nodes (dummy
); phi
; phi
= PHI_CHAIN (phi
))
3671 var
= PHI_RESULT (phi
);
3672 new_phi
= create_phi_node (var
, bb
);
3673 SSA_NAME_DEF_STMT (var
) = new_phi
;
3674 SET_PHI_RESULT (phi
, make_ssa_name (SSA_NAME_VAR (var
), phi
));
3675 add_phi_arg (&new_phi
, PHI_RESULT (phi
), fallthru
);
3678 /* Ensure that the PHI node chain is in the same order. */
3679 set_phi_nodes (bb
, phi_reverse (phi_nodes (bb
)));
3681 /* Add the arguments we have stored on edges. */
3682 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
3687 flush_pending_stmts (e
);
3692 /* Return true if basic block BB does nothing except pass control
3693 flow to another block and that we can safely insert a label at
3694 the start of the successor block.
3696 As a precondition, we require that BB be not equal to
3700 tree_forwarder_block_p (basic_block bb
)
3702 block_stmt_iterator bsi
;
3706 /* BB must have a single outgoing edge. */
3707 if (EDGE_COUNT (bb
->succs
) != 1
3708 /* BB can not have any PHI nodes. This could potentially be
3709 relaxed early in compilation if we re-rewrote the variables
3710 appearing in any PHI nodes in forwarder blocks. */
3712 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
3713 || EDGE_SUCC (bb
, 0)->dest
== EXIT_BLOCK_PTR
3714 /* BB may not have an abnormal outgoing edge. */
3715 || (EDGE_SUCC (bb
, 0)->flags
& EDGE_ABNORMAL
))
3719 gcc_assert (bb
!= ENTRY_BLOCK_PTR
);
3722 /* Successors of the entry block are not forwarders. */
3723 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR
->succs
)
3727 /* Now walk through the statements. We can ignore labels, anything else
3728 means this is not a forwarder block. */
3729 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
3731 tree stmt
= bsi_stmt (bsi
);
3733 switch (TREE_CODE (stmt
))
3736 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
)))
3748 /* Thread jumps from BB. */
3751 thread_jumps_from_bb (basic_block bb
)
3755 bool retval
= false;
3757 /* Examine each of our block's successors to see if it is
3759 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3764 basic_block dest
, tmp
, curr
, old_dest
;
3768 /* If the edge is abnormal or its destination is not
3769 forwardable, then there's nothing to do. */
3770 if ((e
->flags
& EDGE_ABNORMAL
)
3771 || !bb_ann (e
->dest
)->forwardable
)
3777 /* Now walk through as many forwarder blocks as possible to find
3778 the ultimate destination we want to thread our jump to. */
3779 last
= EDGE_SUCC (e
->dest
, 0);
3780 bb_ann (e
->dest
)->forwardable
= 0;
3781 for (dest
= EDGE_SUCC (e
->dest
, 0)->dest
;
3782 bb_ann (dest
)->forwardable
;
3783 last
= EDGE_SUCC (dest
, 0),
3784 dest
= EDGE_SUCC (dest
, 0)->dest
)
3785 bb_ann (dest
)->forwardable
= 0;
3787 /* Reset the forwardable marks to 1. */
3790 tmp
= EDGE_SUCC (tmp
, 0)->dest
)
3791 bb_ann (tmp
)->forwardable
= 1;
3793 if (dest
== e
->dest
)
3799 old
= find_edge (bb
, dest
);
3802 /* If there already is an edge, check whether the values in
3803 phi nodes differ. */
3804 if (!phi_alternatives_equal (dest
, last
, old
))
3806 /* The previous block is forwarder. Redirect our jump
3807 to that target instead since we know it has no PHI
3808 nodes that will need updating. */
3811 /* That might mean that no forwarding at all is
3813 if (dest
== e
->dest
)
3819 old
= find_edge (bb
, dest
);
3823 /* Perform the redirection. */
3826 freq
= EDGE_FREQUENCY (e
);
3828 e
= redirect_edge_and_branch (e
, dest
);
3830 /* Update the profile. */
3831 if (profile_status
!= PROFILE_ABSENT
)
3832 for (curr
= old_dest
;
3834 curr
= EDGE_SUCC (curr
, 0)->dest
)
3836 curr
->frequency
-= freq
;
3837 if (curr
->frequency
< 0)
3838 curr
->frequency
= 0;
3839 curr
->count
-= count
;
3840 if (curr
->count
< 0)
3842 EDGE_SUCC (curr
, 0)->count
-= count
;
3843 if (EDGE_SUCC (curr
, 0)->count
< 0)
3844 EDGE_SUCC (curr
, 0)->count
= 0;
3849 /* Update PHI nodes. We know that the new argument should
3850 have the same value as the argument associated with LAST.
3851 Otherwise we would have changed our target block
3853 for (phi
= phi_nodes (dest
); phi
; phi
= PHI_CHAIN (phi
))
3855 arg
= phi_arg_from_edge (phi
, last
);
3856 gcc_assert (arg
>= 0);
3857 add_phi_arg (&phi
, PHI_ARG_DEF (phi
, arg
), e
);
3861 /* Remove the unreachable blocks (observe that if all blocks
3862 were reachable before, only those in the path we threaded
3863 over and did not have any predecessor outside of the path
3864 become unreachable). */
3865 for (; old_dest
!= dest
; old_dest
= tmp
)
3867 tmp
= EDGE_SUCC (old_dest
, 0)->dest
;
3869 if (EDGE_COUNT (old_dest
->preds
) > 0)
3872 delete_basic_block (old_dest
);
3875 /* Update the dominators. */
3876 if (dom_info_available_p (CDI_DOMINATORS
))
3878 /* If the dominator of the destination was in the
3879 path, set its dominator to the start of the
3881 if (get_immediate_dominator (CDI_DOMINATORS
, old_dest
) == NULL
)
3882 set_immediate_dominator (CDI_DOMINATORS
, old_dest
, bb
);
3884 /* Now proceed like if we forwarded just over one edge at a
3885 time. Algorithm for forwarding edge S --> A over
3886 edge A --> B then is
3889 && !dominated_by (S, B))
3890 idom (B) = idom (A);
3891 recount_idom (A); */
3893 for (; old_dest
!= dest
; old_dest
= tmp
)
3897 tmp
= EDGE_SUCC (old_dest
, 0)->dest
;
3899 if (get_immediate_dominator (CDI_DOMINATORS
, tmp
) == old_dest
3900 && !dominated_by_p (CDI_DOMINATORS
, bb
, tmp
))
3902 dom
= get_immediate_dominator (CDI_DOMINATORS
, old_dest
);
3903 set_immediate_dominator (CDI_DOMINATORS
, tmp
, dom
);
3906 dom
= recount_dominator (CDI_DOMINATORS
, old_dest
);
3907 set_immediate_dominator (CDI_DOMINATORS
, old_dest
, dom
);
3916 /* Thread jumps over empty statements.
3918 This code should _not_ thread over obviously equivalent conditions
3919 as that requires nontrivial updates to the SSA graph.
3921 As a precondition, we require that all basic blocks be reachable.
3922 That is, there should be no opportunities left for
3923 delete_unreachable_blocks. */
3929 bool retval
= false;
3930 basic_block
*worklist
= xmalloc (sizeof (basic_block
) * last_basic_block
);
3931 basic_block
*current
= worklist
;
3935 bb_ann (bb
)->forwardable
= tree_forwarder_block_p (bb
);
3936 bb
->flags
&= ~BB_VISITED
;
3939 /* We pretend to have ENTRY_BLOCK_PTR in WORKLIST. This way,
3940 ENTRY_BLOCK_PTR will never be entered into WORKLIST. */
3941 ENTRY_BLOCK_PTR
->flags
|= BB_VISITED
;
3943 /* Initialize WORKLIST by putting non-forwarder blocks that
3944 immediately precede forwarder blocks because those are the ones
3945 that we know we can thread jumps from. We use BB_VISITED to
3946 indicate whether a given basic block is in WORKLIST or not,
3947 thereby avoiding duplicates in WORKLIST. */
3953 /* We are not interested in finding non-forwarder blocks
3954 directly. We want to find non-forwarder blocks as
3955 predecessors of a forwarder block. */
3956 if (!bb_ann (bb
)->forwardable
)
3959 /* Now we know BB is a forwarder block. Visit each of its
3960 incoming edges and add to WORKLIST all non-forwarder blocks
3961 among BB's predecessors. */
3962 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
3964 /* We don't want to put a duplicate into WORKLIST. */
3965 if ((e
->src
->flags
& BB_VISITED
) == 0
3966 /* We are not interested in threading jumps from a forwarder
3968 && !bb_ann (e
->src
)->forwardable
)
3970 e
->src
->flags
|= BB_VISITED
;
3971 *current
++ = e
->src
;
3976 /* Now let's drain WORKLIST. */
3977 while (worklist
!= current
)
3981 /* BB is no longer in WORKLIST, so clear BB_VISITED. */
3982 bb
->flags
&= ~BB_VISITED
;
3984 if (thread_jumps_from_bb (bb
))
3988 if (tree_forwarder_block_p (bb
))
3993 bb_ann (bb
)->forwardable
= true;
3995 /* Attempts to thread through BB may have been blocked
3996 because BB was not a forwarder block before. Now
3997 that BB is a forwarder block, we should revisit BB's
3999 FOR_EACH_EDGE (f
, ej
, bb
->preds
)
4001 /* We don't want to put a duplicate into WORKLIST. */
4002 if ((f
->src
->flags
& BB_VISITED
) == 0
4003 /* We are not interested in threading jumps from a
4005 && !bb_ann (f
->src
)->forwardable
)
4007 f
->src
->flags
|= BB_VISITED
;
4008 *current
++ = f
->src
;
4015 ENTRY_BLOCK_PTR
->flags
&= ~BB_VISITED
;
/* Return a non-special label in the head of basic block BB.
   Create one if it doesn't exist.  */

tree
tree_block_label (basic_block bb)
{
  block_stmt_iterator i, s = bsi_start (bb);
  bool first = true;
  tree label, stmt;

  for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
    {
      stmt = bsi_stmt (i);
      if (TREE_CODE (stmt) != LABEL_EXPR)
        break;
      label = LABEL_EXPR_LABEL (stmt);
      if (!DECL_NONLOCAL (label))
        {
          if (!first)
            bsi_move_before (&i, &s);
          return label;
        }
    }

  label = create_artificial_label ();
  stmt = build1 (LABEL_EXPR, void_type_node, label);
  bsi_insert_before (&s, stmt, BSI_NEW_STMT);
  return label;
}
4054 /* Attempt to perform edge redirection by replacing a possibly complex
4055 jump instruction by a goto or by removing the jump completely.
4056 This can apply only if all edges now point to the same block. The
4057 parameters and return values are equivalent to
4058 redirect_edge_and_branch. */
4061 tree_try_redirect_by_replacing_jump (edge e
, basic_block target
)
4063 basic_block src
= e
->src
;
4065 block_stmt_iterator b
;
4069 /* Verify that all targets will be TARGET. */
4070 FOR_EACH_EDGE (tmp
, ei
, src
->succs
)
4071 if (tmp
->dest
!= target
&& tmp
!= e
)
4080 stmt
= bsi_stmt (b
);
4082 if (TREE_CODE (stmt
) == COND_EXPR
4083 || TREE_CODE (stmt
) == SWITCH_EXPR
)
4086 e
= ssa_redirect_edge (e
, target
);
4087 e
->flags
= EDGE_FALLTHRU
;
4095 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4096 edge representing the redirected branch. */
4099 tree_redirect_edge_and_branch (edge e
, basic_block dest
)
4101 basic_block bb
= e
->src
;
4102 block_stmt_iterator bsi
;
4106 if (e
->flags
& (EDGE_ABNORMAL_CALL
| EDGE_EH
))
4109 if (e
->src
!= ENTRY_BLOCK_PTR
4110 && (ret
= tree_try_redirect_by_replacing_jump (e
, dest
)))
4113 if (e
->dest
== dest
)
4116 label
= tree_block_label (dest
);
4118 bsi
= bsi_last (bb
);
4119 stmt
= bsi_end_p (bsi
) ? NULL
: bsi_stmt (bsi
);
4121 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
4124 stmt
= (e
->flags
& EDGE_TRUE_VALUE
4125 ? COND_EXPR_THEN (stmt
)
4126 : COND_EXPR_ELSE (stmt
));
4127 GOTO_DESTINATION (stmt
) = label
;
4131 /* No non-abnormal edges should lead from a non-simple goto, and
4132 simple ones should be represented implicitly. */
4137 tree vec
= SWITCH_LABELS (stmt
);
4138 size_t i
, n
= TREE_VEC_LENGTH (vec
);
4140 for (i
= 0; i
< n
; ++i
)
4142 tree elt
= TREE_VEC_ELT (vec
, i
);
4143 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
4144 CASE_LABEL (elt
) = label
;
4151 e
->flags
|= EDGE_FALLTHRU
;
4155 /* Otherwise it must be a fallthru edge, and we don't need to
4156 do anything besides redirecting it. */
4157 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
4161 /* Update/insert PHI nodes as necessary. */
4163 /* Now update the edges in the CFG. */
4164 e
= ssa_redirect_edge (e
, dest
);
/* Simple wrapper, as we can always redirect fallthru edges.  */

static basic_block
tree_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  e = tree_redirect_edge_and_branch (e, dest);
  gcc_assert (e);

  return NULL;
}
4182 /* Splits basic block BB after statement STMT (but at least after the
4183 labels). If STMT is NULL, BB is split just after the labels. */
4186 tree_split_block (basic_block bb
, void *stmt
)
4188 block_stmt_iterator bsi
, bsi_tgt
;
4194 new_bb
= create_empty_bb (bb
);
4196 /* Redirect the outgoing edges. */
4197 new_bb
->succs
= bb
->succs
;
4199 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
4202 if (stmt
&& TREE_CODE ((tree
) stmt
) == LABEL_EXPR
)
4205 /* Move everything from BSI to the new basic block. */
4206 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
4208 act
= bsi_stmt (bsi
);
4209 if (TREE_CODE (act
) == LABEL_EXPR
)
4222 bsi_tgt
= bsi_start (new_bb
);
4223 while (!bsi_end_p (bsi
))
4225 act
= bsi_stmt (bsi
);
4227 bsi_insert_after (&bsi_tgt
, act
, BSI_NEW_STMT
);
/* Moves basic block BB after block AFTER.  */

static bool
tree_move_block_after (basic_block bb, basic_block after)
{
  if (bb->prev_bb == after)
    return true;

  unlink_block (bb);
  link_block (bb, after);

  return true;
}

/* Return true if basic_block can be duplicated.  */

static bool
tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
{
  return true;
}
4257 /* Create a duplicate of the basic block BB. NOTE: This does not
4258 preserve SSA form. */
4261 tree_duplicate_bb (basic_block bb
)
4264 block_stmt_iterator bsi
, bsi_tgt
;
4266 ssa_op_iter op_iter
;
4268 new_bb
= create_empty_bb (EXIT_BLOCK_PTR
->prev_bb
);
4270 /* First copy the phi nodes. We do not copy phi node arguments here,
4271 since the edges are not ready yet. Keep the chain of phi nodes in
4272 the same order, so that we can add them later. */
4273 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
4275 mark_for_rewrite (PHI_RESULT (phi
));
4276 create_phi_node (PHI_RESULT (phi
), new_bb
);
4278 set_phi_nodes (new_bb
, phi_reverse (phi_nodes (new_bb
)));
4280 bsi_tgt
= bsi_start (new_bb
);
4281 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
4283 tree stmt
= bsi_stmt (bsi
);
4286 if (TREE_CODE (stmt
) == LABEL_EXPR
)
4289 /* Record the definitions. */
4290 get_stmt_operands (stmt
);
4292 FOR_EACH_SSA_TREE_OPERAND (val
, stmt
, op_iter
, SSA_OP_ALL_DEFS
)
4293 mark_for_rewrite (val
);
4295 copy
= unshare_expr (stmt
);
4297 /* Copy also the virtual operands. */
4298 get_stmt_ann (copy
);
4299 copy_virtual_operands (copy
, stmt
);
4301 bsi_insert_after (&bsi_tgt
, copy
, BSI_NEW_STMT
);
4307 /* Basic block BB_COPY was created by code duplication. Add phi node
4308 arguments for edges going out of BB_COPY. The blocks that were
4309 duplicated have rbi->duplicated set to one. */
4312 add_phi_args_after_copy_bb (basic_block bb_copy
)
4314 basic_block bb
, dest
;
4317 tree phi
, phi_copy
, phi_next
, def
;
4319 bb
= bb_copy
->rbi
->original
;
4321 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
4323 if (!phi_nodes (e_copy
->dest
))
4326 if (e_copy
->dest
->rbi
->duplicated
)
4327 dest
= e_copy
->dest
->rbi
->original
;
4329 dest
= e_copy
->dest
;
4331 e
= find_edge (bb
, dest
);
4334 /* During loop unrolling the target of the latch edge is copied.
4335 In this case we are not looking for edge to dest, but to
4336 duplicated block whose original was dest. */
4337 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4338 if (e
->dest
->rbi
->duplicated
4339 && e
->dest
->rbi
->original
== dest
)
4342 gcc_assert (e
!= NULL
);
4345 for (phi
= phi_nodes (e
->dest
), phi_copy
= phi_nodes (e_copy
->dest
);
4347 phi
= phi_next
, phi_copy
= TREE_CHAIN (phi_copy
))
4349 phi_next
= TREE_CHAIN (phi
);
4351 gcc_assert (PHI_RESULT (phi
) == PHI_RESULT (phi_copy
));
4352 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
4353 add_phi_arg (&phi_copy
, def
, e_copy
);
4358 /* Blocks in REGION_COPY array of length N_REGION were created by
4359 duplication of basic blocks. Add phi node arguments for edges
4360 going from these blocks. */
4363 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
)
4367 for (i
= 0; i
< n_region
; i
++)
4368 region_copy
[i
]->rbi
->duplicated
= 1;
4370 for (i
= 0; i
< n_region
; i
++)
4371 add_phi_args_after_copy_bb (region_copy
[i
]);
4373 for (i
= 0; i
< n_region
; i
++)
4374 region_copy
[i
]->rbi
->duplicated
= 0;
/* Maps the old ssa name FROM_NAME to TO_NAME.  */

struct ssa_name_map_entry
{
  tree from_name;
  tree to_name;
};

/* Hash function for ssa_name_map_entry.  */

static hashval_t
ssa_name_map_entry_hash (const void *entry)
{
  const struct ssa_name_map_entry *en = entry;
  return SSA_NAME_VERSION (en->from_name);
}

/* Equality function for ssa_name_map_entry.  */

static int
ssa_name_map_entry_eq (const void *in_table, const void *ssa_name)
{
  const struct ssa_name_map_entry *en = in_table;

  return en->from_name == ssa_name;
}
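
/* A minimal sketch of consulting the map built with these callbacks
   (a hypothetical helper, not part of this file; kept under "#if 0"):
   return the duplicate ssa name recorded for NAME, or NAME itself when
   no entry exists, using the same hash scheme as the lookups in
   rewrite_to_new_ssa_names_def/use below.  */
#if 0
static tree
lookup_new_ssa_name (htab_t map, tree name)
{
  struct ssa_name_map_entry *entry
    = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));

  return entry ? entry->to_name : name;
}
#endif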
4404 /* Allocate duplicates of ssa names in list DEFINITIONS and store the mapping
4408 allocate_ssa_names (bitmap definitions
, htab_t
*map
)
4411 struct ssa_name_map_entry
*entry
;
4417 *map
= htab_create (10, ssa_name_map_entry_hash
,
4418 ssa_name_map_entry_eq
, free
);
4419 EXECUTE_IF_SET_IN_BITMAP (definitions
, 0, ver
, bi
)
4421 name
= ssa_name (ver
);
4422 slot
= htab_find_slot_with_hash (*map
, name
, SSA_NAME_VERSION (name
),
4428 entry
= xmalloc (sizeof (struct ssa_name_map_entry
));
4429 entry
->from_name
= name
;
4432 entry
->to_name
= duplicate_ssa_name (name
, SSA_NAME_DEF_STMT (name
));
4436 /* Rewrite the definition DEF in statement STMT to new ssa name as specified
4437 by the mapping MAP. */
4440 rewrite_to_new_ssa_names_def (def_operand_p def
, tree stmt
, htab_t map
)
4442 tree name
= DEF_FROM_PTR (def
);
4443 struct ssa_name_map_entry
*entry
;
4445 gcc_assert (TREE_CODE (name
) == SSA_NAME
);
4447 entry
= htab_find_with_hash (map
, name
, SSA_NAME_VERSION (name
));
4451 SET_DEF (def
, entry
->to_name
);
4452 SSA_NAME_DEF_STMT (entry
->to_name
) = stmt
;
4455 /* Rewrite the USE to new ssa name as specified by the mapping MAP. */
4458 rewrite_to_new_ssa_names_use (use_operand_p use
, htab_t map
)
4460 tree name
= USE_FROM_PTR (use
);
4461 struct ssa_name_map_entry
*entry
;
4463 if (TREE_CODE (name
) != SSA_NAME
)
4466 entry
= htab_find_with_hash (map
, name
, SSA_NAME_VERSION (name
));
4470 SET_USE (use
, entry
->to_name
);
4473 /* Rewrite the ssa names in basic block BB to new ones as specified by the
4477 rewrite_to_new_ssa_names_bb (basic_block bb
, htab_t map
)
4483 block_stmt_iterator bsi
;
4487 v_may_def_optype v_may_defs
;
4488 v_must_def_optype v_must_defs
;
4491 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4492 if (e
->flags
& EDGE_ABNORMAL
)
4495 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
4497 rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi
), phi
, map
);
4499 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)) = 1;
4502 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
4504 stmt
= bsi_stmt (bsi
);
4505 get_stmt_operands (stmt
);
4506 ann
= stmt_ann (stmt
);
4508 uses
= USE_OPS (ann
);
4509 for (i
= 0; i
< NUM_USES (uses
); i
++)
4510 rewrite_to_new_ssa_names_use (USE_OP_PTR (uses
, i
), map
);
4512 defs
= DEF_OPS (ann
);
4513 for (i
= 0; i
< NUM_DEFS (defs
); i
++)
4514 rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs
, i
), stmt
, map
);
4516 vuses
= VUSE_OPS (ann
);
4517 for (i
= 0; i
< NUM_VUSES (vuses
); i
++)
4518 rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses
, i
), map
);
4520 v_may_defs
= V_MAY_DEF_OPS (ann
);
4521 for (i
= 0; i
< NUM_V_MAY_DEFS (v_may_defs
); i
++)
4523 rewrite_to_new_ssa_names_use
4524 (V_MAY_DEF_OP_PTR (v_may_defs
, i
), map
);
4525 rewrite_to_new_ssa_names_def
4526 (V_MAY_DEF_RESULT_PTR (v_may_defs
, i
), stmt
, map
);
4529 v_must_defs
= V_MUST_DEF_OPS (ann
);
4530 for (i
= 0; i
< NUM_V_MUST_DEFS (v_must_defs
); i
++)
4532 rewrite_to_new_ssa_names_def
4533 (V_MUST_DEF_RESULT_PTR (v_must_defs
, i
), stmt
, map
);
4534 rewrite_to_new_ssa_names_use
4535 (V_MUST_DEF_KILL_PTR (v_must_defs
, i
), map
);
4539 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4540 for (phi
= phi_nodes (e
->dest
); phi
; phi
= PHI_CHAIN (phi
))
4542 rewrite_to_new_ssa_names_use
4543 (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, e
), map
);
4545 if (e
->flags
& EDGE_ABNORMAL
)
4547 tree op
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
4548 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op
) = 1;
4553 /* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
4554 by the mapping MAP. */
4557 rewrite_to_new_ssa_names (basic_block
*region
, unsigned n_region
, htab_t map
)
4561 for (r
= 0; r
< n_region
; r
++)
4562 rewrite_to_new_ssa_names_bb (region
[r
], map
);
4565 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4566 important exit edge EXIT. By important we mean that no SSA name defined
4567 inside region is live over the other exit edges of the region. All entry
4568 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4569 to the duplicate of the region. SSA form, dominance and loop information
4570 is updated. The new basic blocks are stored to REGION_COPY in the same
4571 order as they had in REGION, provided that REGION_COPY is not NULL.
4572 The function returns false if it is unable to copy the region,
4576 tree_duplicate_sese_region (edge entry
, edge exit
,
4577 basic_block
*region
, unsigned n_region
,
4578 basic_block
*region_copy
)
4580 unsigned i
, n_doms
, ver
;
4581 bool free_region_copy
= false, copying_header
= false;
4582 struct loop
*loop
= entry
->dest
->loop_father
;
4587 htab_t ssa_name_map
= NULL
;
4591 if (!can_copy_bbs_p (region
, n_region
))
4594 /* Some sanity checking. Note that we do not check for all possible
4595 missuses of the functions. I.e. if you ask to copy something weird,
4596 it will work, but the state of structures probably will not be
4599 for (i
= 0; i
< n_region
; i
++)
4601 /* We do not handle subloops, i.e. all the blocks must belong to the
4603 if (region
[i
]->loop_father
!= loop
)
4606 if (region
[i
] != entry
->dest
4607 && region
[i
] == loop
->header
)
4613 /* In case the function is used for loop header copying (which is the primary
4614 use), ensure that EXIT and its copy will be new latch and entry edges. */
4615 if (loop
->header
== entry
->dest
)
4617 copying_header
= true;
4618 loop
->copy
= loop
->outer
;
4620 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
4623 for (i
= 0; i
< n_region
; i
++)
4624 if (region
[i
] != exit
->src
4625 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
4631 region_copy
= xmalloc (sizeof (basic_block
) * n_region
);
4632 free_region_copy
= true;
4635 gcc_assert (!any_marked_for_rewrite_p ());
4637 /* Record blocks outside the region that are duplicated by something
4639 doms
= xmalloc (sizeof (basic_block
) * n_basic_blocks
);
4640 n_doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
, doms
);
4642 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
);
4643 definitions
= marked_ssa_names ();
4647 loop
->header
= exit
->dest
;
4648 loop
->latch
= exit
->src
;
4651 /* Redirect the entry and add the phi node arguments. */
4652 redirected
= redirect_edge_and_branch (entry
, entry
->dest
->rbi
->copy
);
4653 gcc_assert (redirected
!= NULL
);
4654 flush_pending_stmts (entry
);
4656 /* Concerning updating of dominators: We must recount dominators
4657 for entry block and its copy. Anything that is outside of the region, but
4658 was dominated by something inside needs recounting as well. */
4659 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
4660 doms
[n_doms
++] = entry
->dest
->rbi
->original
;
4661 iterate_fix_dominators (CDI_DOMINATORS
, doms
, n_doms
);
4664 /* Add the other phi node arguments. */
4665 add_phi_args_after_copy (region_copy
, n_region
);
4667 /* Add phi nodes for definitions at exit. TODO -- once we have immediate
4668 uses, it should be possible to emit phi nodes just for definitions that
4669 are used outside region. */
4670 EXECUTE_IF_SET_IN_BITMAP (definitions
, 0, ver
, bi
)
4672 tree name
= ssa_name (ver
);
4674 phi
= create_phi_node (name
, exit
->dest
);
4675 add_phi_arg (&phi
, name
, exit
);
4676 add_phi_arg (&phi
, name
, exit_copy
);
4678 SSA_NAME_DEF_STMT (name
) = phi
;
4681 /* And create new definitions inside region and its copy. TODO -- once we
4682 have immediate uses, it might be better to leave definitions in region
4683 unchanged, create new ssa names for phi nodes on exit, and rewrite
4684 the uses, to avoid changing the copied region. */
4685 allocate_ssa_names (definitions
, &ssa_name_map
);
4686 rewrite_to_new_ssa_names (region
, n_region
, ssa_name_map
);
4687 allocate_ssa_names (definitions
, &ssa_name_map
);
4688 rewrite_to_new_ssa_names (region_copy
, n_region
, ssa_name_map
);
4689 htab_delete (ssa_name_map
);
4691 if (free_region_copy
)
4694 unmark_all_for_rewrite ();
4695 BITMAP_XFREE (definitions
);
4700 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
4703 dump_function_to_file (tree fn
, FILE *file
, int flags
)
4705 tree arg
, vars
, var
;
4706 bool ignore_topmost_bind
= false, any_var
= false;
4710 fprintf (file
, "%s (", lang_hooks
.decl_printable_name (fn
, 2));
4712 arg
= DECL_ARGUMENTS (fn
);
4715 print_generic_expr (file
, arg
, dump_flags
);
4716 if (TREE_CHAIN (arg
))
4717 fprintf (file
, ", ");
4718 arg
= TREE_CHAIN (arg
);
4720 fprintf (file
, ")\n");
4722 if (flags
& TDF_RAW
)
4724 dump_node (fn
, TDF_SLIM
| flags
, file
);
4728 /* When GIMPLE is lowered, the variables are no longer available in
4729 BIND_EXPRs, so display them separately. */
4730 if (cfun
&& cfun
->unexpanded_var_list
)
4732 ignore_topmost_bind
= true;
4734 fprintf (file
, "{\n");
4735 for (vars
= cfun
->unexpanded_var_list
; vars
; vars
= TREE_CHAIN (vars
))
4737 var
= TREE_VALUE (vars
);
4739 print_generic_decl (file
, var
, flags
);
4740 fprintf (file
, "\n");
4746 if (basic_block_info
)
4748 /* Make a CFG based dump. */
4749 check_bb_profile (ENTRY_BLOCK_PTR
, file
);
4750 if (!ignore_topmost_bind
)
4751 fprintf (file
, "{\n");
4753 if (any_var
&& n_basic_blocks
)
4754 fprintf (file
, "\n");
4757 dump_generic_bb (file
, bb
, 2, flags
);
4759 fprintf (file
, "}\n");
4760 check_bb_profile (EXIT_BLOCK_PTR
, file
);
4766 /* Make a tree based dump. */
4767 chain
= DECL_SAVED_TREE (fn
);
4769 if (TREE_CODE (chain
) == BIND_EXPR
)
4771 if (ignore_topmost_bind
)
4773 chain
= BIND_EXPR_BODY (chain
);
4781 if (!ignore_topmost_bind
)
4782 fprintf (file
, "{\n");
4787 fprintf (file
, "\n");
4789 print_generic_stmt_indented (file
, chain
, flags
, indent
);
4790 if (ignore_topmost_bind
)
4791 fprintf (file
, "}\n");
4794 fprintf (file
, "\n\n");
/* Pretty print of the loops intermediate representation.  */
static void print_loop (FILE *, struct loop *, int);
static void print_pred_bbs (FILE *, basic_block bb);
static void print_succ_bbs (FILE *, basic_block bb);

/* Print on FILE the indexes of the predecessors of basic block BB.  */

static void
print_pred_bbs (FILE *file, basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    fprintf (file, "bb_%d", e->src->index);
}

/* Print on FILE the indexes of the successors of basic block BB.  */

static void
print_succ_bbs (FILE *file, basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    fprintf (file, "bb_%d", e->dest->index);
}
4830 /* Pretty print LOOP on FILE, indented INDENT spaces. */
4833 print_loop (FILE *file
, struct loop
*loop
, int indent
)
4841 s_indent
= (char *) alloca ((size_t) indent
+ 1);
4842 memset ((void *) s_indent
, ' ', (size_t) indent
);
4843 s_indent
[indent
] = '\0';
4845 /* Print the loop's header. */
4846 fprintf (file
, "%sloop_%d\n", s_indent
, loop
->num
);
4848 /* Print the loop's body. */
4849 fprintf (file
, "%s{\n", s_indent
);
4851 if (bb
->loop_father
== loop
)
4853 /* Print the basic_block's header. */
4854 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
4855 print_pred_bbs (file
, bb
);
4856 fprintf (file
, "}, succs = {");
4857 print_succ_bbs (file
, bb
);
4858 fprintf (file
, "})\n");
4860 /* Print the basic_block's body. */
4861 fprintf (file
, "%s {\n", s_indent
);
4862 tree_dump_bb (bb
, file
, indent
+ 4);
4863 fprintf (file
, "%s }\n", s_indent
);
4866 print_loop (file
, loop
->inner
, indent
+ 2);
4867 fprintf (file
, "%s}\n", s_indent
);
4868 print_loop (file
, loop
->next
, indent
);
4872 /* Follow a CFG edge from the entry point of the program, and on entry
4873 of a loop, pretty print the loop structure on FILE. */
4876 print_loop_ir (FILE *file
)
4880 bb
= BASIC_BLOCK (0);
4881 if (bb
&& bb
->loop_father
)
4882 print_loop (file
, bb
->loop_father
, 0);
4886 /* Debugging loops structure at tree level. */
4889 debug_loop_ir (void)
4891 print_loop_ir (stderr
);
/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call.  Return false,
   otherwise.  */

static bool
tree_block_ends_with_call_p (basic_block bb)
{
  block_stmt_iterator bsi = bsi_last (bb);
  return get_call_expr_in (bsi_stmt (bsi)) != NULL;
}

/* Return true if BB ends with a conditional branch.  Return false,
   otherwise.  */

static bool
tree_block_ends_with_condjump_p (basic_block bb)
{
  tree stmt = tsi_stmt (bsi_last (bb).tsi);
  return (TREE_CODE (stmt) == COND_EXPR);
}
/* Return true if we need to add a fake edge to exit at statement T.
   Helper function for tree_flow_call_edges_add.  */

static bool
need_fake_edge_p (tree t)
{
  tree call;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST, PURE and ALWAYS_RETURN calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  call = get_call_expr_in (t);
  if (call
      && !(call_expr_flags (call) &
           (ECF_NORETURN | ECF_LONGJMP | ECF_ALWAYS_RETURN)))
    return true;

  if (TREE_CODE (t) == ASM_EXPR
      && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
    return true;

  return false;
}
4948 noreturn calls, volatile inline assembly in the bitmap of blocks
4949 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
4950 the number of blocks that were split.
4952 The goal is to expose cases in which entering a basic block does
4953 not imply that all subsequent instructions must be executed. */
4956 tree_flow_call_edges_add (sbitmap blocks
)
4959 int blocks_split
= 0;
4960 int last_bb
= last_basic_block
;
4961 bool check_last_block
= false;
4963 if (n_basic_blocks
== 0)
4967 check_last_block
= true;
4969 check_last_block
= TEST_BIT (blocks
, EXIT_BLOCK_PTR
->prev_bb
->index
);
4971 /* In the last basic block, before epilogue generation, there will be
4972 a fallthru edge to EXIT. Special care is required if the last insn
4973 of the last basic block is a call because make_edge folds duplicate
4974 edges, which would result in the fallthru edge also being marked
4975 fake, which would result in the fallthru edge being removed by
4976 remove_fake_edges, which would result in an invalid CFG.
4978 Moreover, we can't elide the outgoing fake edge, since the block
4979 profiler needs to take this into account in order to solve the minimal
4980 spanning tree in the case that the call doesn't return.
4982 Handle this by adding a dummy instruction in a new last basic block. */
4983 if (check_last_block
)
4986 basic_block bb
= EXIT_BLOCK_PTR
->prev_bb
;
4987 block_stmt_iterator bsi
= bsi_last (bb
);
4989 if (!bsi_end_p (bsi
))
4992 if (need_fake_edge_p (t
))
4996 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4997 if (e
->dest
== EXIT_BLOCK_PTR
)
4999 bsi_insert_on_edge (e
, build_empty_stmt ());
5000 bsi_commit_edge_inserts ((int *)NULL
);
5006 /* Now add fake edges to the function exit for any non constant
5007 calls since there is no way that we can determine if they will
5009 for (i
= 0; i
< last_bb
; i
++)
5011 basic_block bb
= BASIC_BLOCK (i
);
5012 block_stmt_iterator bsi
;
5013 tree stmt
, last_stmt
;
5018 if (blocks
&& !TEST_BIT (blocks
, i
))
5021 bsi
= bsi_last (bb
);
5022 if (!bsi_end_p (bsi
))
5024 last_stmt
= bsi_stmt (bsi
);
5027 stmt
= bsi_stmt (bsi
);
5028 if (need_fake_edge_p (stmt
))
5031 /* The handling above of the final block before the
5032 epilogue should be enough to verify that there is
5033 no edge to the exit block in CFG already.
5034 Calling make_edge in such case would cause us to
5035 mark that edge as fake and remove it later. */
5036 #ifdef ENABLE_CHECKING
5037 if (stmt
== last_stmt
)
5040 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5041 gcc_assert (e
->dest
!= EXIT_BLOCK_PTR
);
5045 /* Note that the following may create a new basic block
5046 and renumber the existing basic blocks. */
5047 if (stmt
!= last_stmt
)
5049 e
= split_block (bb
, stmt
);
5053 make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_FAKE
);
5057 while (!bsi_end_p (bsi
));
5062 verify_flow_info ();
5064 return blocks_split
;
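
/* Illustration (block numbers hypothetical): given

       bb_2:  a = foo ();
              b = a + 1;

   tree_flow_call_edges_add splits bb_2 right after the call and adds a
   fake edge to EXIT from the block that keeps the call, roughly

       bb_2:  a = foo ();     (plus a fake edge to EXIT)
       bb_5:  b = a + 1;

   so that reaching bb_2 no longer implies that the statement after the
   call is executed.  */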
/* Purge dead EH edges from basic block BB.  */

bool
tree_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  tree stmt = last_stmt (bb);

  if (stmt && tree_can_throw_internal (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
	{
	  ssa_remove_edge (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  /* Removal of dead EH edges might change dominators of not just
     immediate successors.  E.g. when bb1 is changed so that it no
     longer can throw and the dead EH edges bb1->bb3 and bb1->bb4 are
     purged by this function, the immediate dominator of a block
     further down the CFG (say bb5) may have to be recomputed.  For
     now just free the dominance info.  */
  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  return changed;
}
/* Purge dead EH edges from all the basic blocks listed in BLOCKS.  */

bool
tree_purge_all_dead_eh_edges (bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
    }

  return changed;
}
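
/* Usage sketch (the bitmap name is hypothetical): a pass that has just
   folded statements in the blocks recorded in NEED_EH_CLEANUP would call

       changed |= tree_purge_all_dead_eh_edges (need_eh_cleanup);

   to remove EH edges that the folded statements can no longer take.  */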
struct cfg_hooks tree_cfg_hooks = {
  "tree",
  tree_verify_flow_info,
  tree_dump_bb,				/* dump_bb  */
  create_bb,				/* create_basic_block  */
  tree_redirect_edge_and_branch,	/* redirect_edge_and_branch  */
  tree_redirect_edge_and_branch_force,	/* redirect_edge_and_branch_force  */
  remove_bb,				/* delete_basic_block  */
  tree_split_block,			/* split_block  */
  tree_move_block_after,		/* move_block_after  */
  tree_can_merge_blocks_p,		/* can_merge_blocks_p  */
  tree_merge_blocks,			/* merge_blocks  */
  tree_predict_edge,			/* predict_edge  */
  tree_predicted_by_p,			/* predicted_by_p  */
  tree_can_duplicate_bb_p,		/* can_duplicate_block_p  */
  tree_duplicate_bb,			/* duplicate_block  */
  tree_split_edge,			/* split_edge  */
  tree_make_forwarder_block,		/* make_forwarder_block  */
  NULL,					/* tidy_fallthru_edge  */
  tree_block_ends_with_call_p,		/* block_ends_with_call_p  */
  tree_block_ends_with_condjump_p,	/* block_ends_with_condjump_p  */
  tree_flow_call_edges_add		/* flow_call_edges_add  */
};
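
/* The generic CFG code dispatches through this vector once it has been
   installed; for example (sketch), after

       tree_register_cfg_hooks ();

   in cfghooks.c points cfg_hooks at this structure, generic calls such
   as split_edge () or delete_basic_block () end up in the tree-level
   implementations above.  */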
/* Split all critical edges.  */

static void
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	  split_edge (e);
    }
}
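
/* Reminder (informal, block numbers hypothetical): an edge is critical
   when its source has more than one successor and its destination has
   more than one predecessor, e.g. the edge bb1->bb3 in

       bb1 -> bb2,  bb1 -> bb3,  bb2 -> bb3

   Splitting it inserts an empty block on bb1->bb3 so that later passes
   can insert code on that path without affecting the bb2->bb3 path.  */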
struct tree_opt_pass pass_split_crit_edges =
{
  "crited",				/* name */
  NULL,					/* gate */
  split_critical_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SPLIT_EDGES,			/* tv_id */
  PROP_cfg,				/* properties_required */
  PROP_no_crit_edges,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
/* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
   a temporary, make sure it is registered to be renamed if necessary,
   and finally return the temporary.  Put the statements to compute
   EXP before the current statement in BSI.  */

tree
gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
{
  tree t, new_stmt, orig_stmt;

  if (is_gimple_val (exp))
    return exp;

  t = make_rename_temp (type, NULL);
  new_stmt = build (MODIFY_EXPR, type, t, exp);

  orig_stmt = bsi_stmt (*bsi);
  SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
  TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);

  bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);

  return t;
}
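
/* Behavior sketch: if EXP is already a GIMPLE value (e.g. an SSA name
   or a constant), gimplify_val returns it unchanged and emits nothing;
   otherwise it emits, before the statement at *BSI,

       tmp = <EXP>;

   where TMP is a fresh rename temporary of TYPE, and returns TMP.  */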
/* Build a ternary operation and gimplify it.  Emit code before BSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
		 tree type, tree a, tree b, tree c)
{
  tree ret;

  ret = fold (build3 (code, type, a, b, c));
  STRIP_NOPS (ret);

  return gimplify_val (bsi, type, ret);
}
/* Build a binary operation and gimplify it.  Emit code before BSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
		 tree type, tree a, tree b)
{
  tree ret;

  ret = fold (build2 (code, type, a, b));
  STRIP_NOPS (ret);

  return gimplify_val (bsi, type, ret);
}
/* Build a unary operation and gimplify it.  Emit code before BSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
		 tree a)
{
  tree ret;

  ret = fold (build1 (code, type, a));
  STRIP_NOPS (ret);

  return gimplify_val (bsi, type, ret);
}
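
/* Usage sketch (variable names hypothetical): a pass lowering -(a + b)
   in front of the statement at *BSI could write

       tree sum = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);
       tree neg = gimplify_build1 (bsi, NEGATE_EXPR, type, sum);

   and then use NEG as a GIMPLE rvalue; the temporaries and their
   MODIFY_EXPRs are emitted before *BSI by gimplify_val.  */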
/* Emit return warnings.  */

static void
execute_warn_function_return (void)
{
#ifdef USE_MAPPED_LOCATION
  source_location location;
#else
  location_t *locus;
#endif
  tree last;
  edge e;
  edge_iterator ei;

  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
      && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
    warning ("%Jfunction might be possible candidate for "
	     "attribute %<noreturn%>",
	     cfun->decl);

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
#ifdef USE_MAPPED_LOCATION
      location = UNKNOWN_LOCATION;
#else
      locus = NULL;
#endif
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  last = last_stmt (e->src);
	  if (TREE_CODE (last) == RETURN_EXPR
#ifdef USE_MAPPED_LOCATION
	      && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
#else
	      && (locus = EXPR_LOCUS (last)) != NULL)
#endif
	    break;
	}
#ifdef USE_MAPPED_LOCATION
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning ("%H%<noreturn%> function does return", &location);
#else
      if (!locus)
	locus = &cfun->function_end_locus;
      warning ("%H%<noreturn%> function does return", locus);
#endif
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  tree last = last_stmt (e->src);
	  if (TREE_CODE (last) == RETURN_EXPR
	      && TREE_OPERAND (last, 0) == NULL)
	    {
#ifdef USE_MAPPED_LOCATION
	      location = EXPR_LOCATION (last);
	      if (location == UNKNOWN_LOCATION)
		location = cfun->function_end_locus;
	      warning ("%Hcontrol reaches end of non-void function", &location);
#else
	      locus = EXPR_LOCUS (last);
	      if (!locus)
		locus = &cfun->function_end_locus;
	      warning ("%Hcontrol reaches end of non-void function", locus);
#endif
	      break;
	    }
	}
    }
}
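
/* Informal source-level examples of the two diagnostics above:

       int f (int x) { if (x) return 1; }
           -> "control reaches end of non-void function"

       void g (void) __attribute__ ((noreturn));
       void g (void) { }
           -> "'noreturn' function does return"  */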
/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
				     edge *true_edge,
				     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
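
/* Usage sketch: a pass that knows BB ends in a COND_EXPR with exactly
   two successors can recover its outgoing edges with

       edge true_edge, false_edge;
       extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   after which true_edge->dest is the block reached when the predicate
   is true.  */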
struct tree_opt_pass pass_warn_function_return =
{
  NULL,					/* name */
  NULL,					/* gate */
  execute_warn_function_return,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
#include "gt-tree-cfg.h"