/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "cfglayout.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* Mapping of labels to their associated blocks.  This can greatly speed up
   building of the CFG in code with lots of gotos.  */
static GTY(()) varray_type label_to_block_map;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of SWITCH_EXPRs.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

struct edge_to_cases_elt
{
  /* The edge itself.  Necessary for hashing and equality tests.  */
  edge e;

  /* The case labels associated with this edge.  We link these up via
     their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
     when we destroy the hash table.  This prevents problems when copying
     SWITCH_EXPRs.  */
  tree case_labels;
};

static htab_t edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;
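/* Illustrative sketch only, not part of GCC: the edge -> case-label chaining
   scheme described above, reduced to plain C.  Each entry keys on an edge
   pointer and threads its case labels through a `chain' field, mirroring how
   record_switch_edge threads CASE_LABEL_EXPRs through TREE_CHAIN.  The names
   toy_case, toy_edge_entry and toy_record_case are hypothetical.  */
#if 0
struct toy_case
{
  int low, high;                /* Case range.  */
  struct toy_case *chain;       /* Plays the role of TREE_CHAIN.  */
};

struct toy_edge_entry
{
  const void *edge;             /* The edge pointer, used as the hash key.  */
  struct toy_case *cases;       /* Chain of cases using that edge.  */
};

/* Link case C onto the chain for ENTRY, the same way record_switch_edge
   does: the new case becomes the head of the chain.  */
static void
toy_record_case (struct toy_edge_entry *entry, struct toy_case *c)
{
  c->chain = entry->cases;
  entry->cases = c;
}
#endif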
/* Basic blocks and flowgraphs.  */
static basic_block create_bb (void *, void *, basic_block);
static void create_block_annotation (basic_block);
static void free_blocks_annotations (void);
static void clear_blocks_annotations (void);
static void make_blocks (tree);
static void factor_computed_gotos (void);

static void make_edges (void);
static void make_ctrl_stmt_edges (basic_block);
static void make_exit_edges (basic_block);
static void make_cond_expr_edges (basic_block);
static void make_switch_expr_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static edge tree_redirect_edge_and_branch (edge, basic_block);
static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
static void split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (tree, tree);
static int tree_verify_flow_info (void);
static void tree_make_forwarder_block (edge);
static bool tree_forwarder_block_p (basic_block);
static void tree_cfg2vcg (FILE *);

/* Flowgraph optimization and cleanup.  */
static void tree_merge_blocks (basic_block, basic_block);
static bool tree_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static bool cleanup_control_flow (void);
static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (tree, tree);
static bool phi_alternatives_equal (basic_block, edge, edge);
static bool cleanup_forwarder_blocks (void);
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  TP points to the list of
   statements to be added to the flowgraph.  */

static void
build_tree_cfg (tree *tp)
{
  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Initialize rbi_pool.  */

  /* Initialize the basic block array.  */
  profile_status = PROFILE_ABSENT;
  last_basic_block = 0;
  VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  /* Build a mapping of labels to their associated blocks.  */
  VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
		  "label to block map");

  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  found_computed_goto = 0;
  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == 0)
    create_empty_bb (ENTRY_BLOCK_PTR);

  create_block_annotation (ENTRY_BLOCK_PTR);
  create_block_annotation (EXIT_BLOCK_PTR);

  /* Adjust the size of the array.  */
  VARRAY_GROW (basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();
198 /* Create the edges of the flowgraph. */
201 /* Debugging dumps. */
203 /* Write the flowgraph to a VCG file. */
205 int local_dump_flags
;
206 FILE *dump_file
= dump_begin (TDI_vcg
, &local_dump_flags
);
209 tree_cfg2vcg (dump_file
);
210 dump_end (TDI_vcg
, dump_file
);
214 /* Dump a textual representation of the flowgraph. */
216 dump_tree_cfg (dump_file
, dump_flags
);
static void
execute_build_cfg (void)
{
  build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
}

struct tree_opt_pass pass_build_cfg =
{
  execute_build_cfg,		/* execute */
  0,				/* static_pass_number */
  TV_TREE_CFG,			/* tv_id */
  PROP_gimple_leh,		/* properties_required */
  PROP_cfg,			/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_verify_stmts,		/* todo_flags_finish */
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  tree factored_label_decl = NULL;
  tree factored_computed_goto_label = NULL;
  tree factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */
262 block_stmt_iterator bsi
= bsi_last (bb
);
267 last
= bsi_stmt (bsi
);
269 /* Ignore the computed goto we create when we factor the original
271 if (last
== factored_computed_goto
)
274 /* If the last statement is a computed goto, factor it. */
275 if (computed_goto_p (last
))
279 /* The first time we find a computed goto we need to create
280 the factored goto block and the variable each original
281 computed goto will use for their goto destination. */
282 if (! factored_computed_goto
)
284 basic_block new_bb
= create_empty_bb (bb
);
285 block_stmt_iterator new_bsi
= bsi_start (new_bb
);
287 /* Create the destination of the factored goto. Each original
288 computed goto will put its desired destination into this
289 variable and jump to the label we create immediately
291 var
= create_tmp_var (ptr_type_node
, "gotovar");
293 /* Build a label for the new block which will contain the
294 factored computed goto. */
295 factored_label_decl
= create_artificial_label ();
296 factored_computed_goto_label
297 = build1 (LABEL_EXPR
, void_type_node
, factored_label_decl
);
298 bsi_insert_after (&new_bsi
, factored_computed_goto_label
,
301 /* Build our new computed goto. */
302 factored_computed_goto
= build1 (GOTO_EXPR
, void_type_node
, var
);
303 bsi_insert_after (&new_bsi
, factored_computed_goto
,
307 /* Copy the original computed goto's destination into VAR. */
308 assignment
= build (MODIFY_EXPR
, ptr_type_node
,
309 var
, GOTO_DESTINATION (last
));
310 bsi_insert_before (&bsi
, assignment
, BSI_SAME_STMT
);
312 /* And re-vector the computed goto to the new destination. */
313 GOTO_DESTINATION (last
) = factored_label_decl
;
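/* Illustrative sketch only, not part of GCC: what factor_computed_gotos above
   effects, shown at the source level with GNU C's labels-as-values extension.
   Every original computed goto is rewritten to store its destination in a
   single variable (the "gotovar" temporary created above) and jump to one
   factored dispatch label, so only one block needs abnormal out-edges.
   The function and label names here are hypothetical.  */
#if 0
/* Before factoring: two computed gotos, each needing edges to every
   potential label.  */
static void
toy_before (int i)
{
  void *table[2] = { &&L1, &&L2 };

  goto *table[i & 1];           /* computed goto #1 */
 L1:
  goto *table[(i >> 1) & 1];    /* computed goto #2 */
 L2:
  return;
}

/* After factoring: each original goto merely stores its destination and
   jumps to the single factored label, which does the one remaining
   computed goto.  */
static void
toy_after (int i)
{
  void *table[2] = { &&L1, &&L2 };
  void *gotovar;

  gotovar = table[i & 1];
  goto factored;
 L1:
  gotovar = table[(i >> 1) & 1];
  goto factored;
 L2:
  return;

 factored:
  goto *gotovar;                /* the only computed goto left */
}
#endif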
319 /* Create annotations for a single basic block. */
322 create_block_annotation (basic_block bb
)
324 /* Verify that the tree_annotations field is clear. */
325 gcc_assert (!bb
->tree_annotations
);
326 bb
->tree_annotations
= ggc_alloc_cleared (sizeof (struct bb_ann_d
));
330 /* Free the annotations for all the basic blocks. */
332 static void free_blocks_annotations (void)
334 clear_blocks_annotations ();
338 /* Clear the annotations for all the basic blocks. */
341 clear_blocks_annotations (void)
345 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, NULL
, next_bb
)
346 bb
->tree_annotations
= NULL
;
350 /* Build a flowgraph for the statement_list STMT_LIST. */
353 make_blocks (tree stmt_list
)
355 tree_stmt_iterator i
= tsi_start (stmt_list
);
357 bool start_new_block
= true;
358 bool first_stmt_of_list
= true;
359 basic_block bb
= ENTRY_BLOCK_PTR
;
361 while (!tsi_end_p (i
))
368 /* If the statement starts a new basic block or if we have determined
369 in a previous pass that we need to create a new block for STMT, do
371 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
373 if (!first_stmt_of_list
)
374 stmt_list
= tsi_split_statement_list_before (&i
);
375 bb
= create_basic_block (stmt_list
, NULL
, bb
);
376 start_new_block
= false;
379 /* Now add STMT to BB and create the subgraphs for special statement
381 set_bb_for_stmt (stmt
, bb
);
383 if (computed_goto_p (stmt
))
384 found_computed_goto
= true;
386 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
388 if (stmt_ends_bb_p (stmt
))
389 start_new_block
= true;
392 first_stmt_of_list
= false;
397 /* Create and return a new empty basic block after bb AFTER. */
400 create_bb (void *h
, void *e
, basic_block after
)
406 /* Create and initialize a new basic block. Since alloc_block uses
407 ggc_alloc_cleared to allocate a basic block, we do not have to
408 clear the newly allocated basic block here. */
411 bb
->index
= last_basic_block
;
413 bb
->stmt_list
= h
? h
: alloc_stmt_list ();
415 /* Add the new block to the linked list of blocks. */
416 link_block (bb
, after
);
418 /* Grow the basic block array if needed. */
419 if ((size_t) last_basic_block
== VARRAY_SIZE (basic_block_info
))
421 size_t new_size
= last_basic_block
+ (last_basic_block
+ 3) / 4;
422 VARRAY_GROW (basic_block_info
, new_size
);
425 /* Add the newly created block to the array. */
426 BASIC_BLOCK (last_basic_block
) = bb
;
428 create_block_annotation (bb
);
433 initialize_bb_rbi (bb
);
438 /*---------------------------------------------------------------------------
440 ---------------------------------------------------------------------------*/
442 /* Join all the blocks in the flowgraph. */
449 /* Create an edge from entry to the first block with executable
451 make_edge (ENTRY_BLOCK_PTR
, BASIC_BLOCK (0), EDGE_FALLTHRU
);
453 /* Traverse the basic block array placing edges. */
456 tree first
= first_stmt (bb
);
457 tree last
= last_stmt (bb
);
461 /* Edges for statements that always alter flow control. */
462 if (is_ctrl_stmt (last
))
463 make_ctrl_stmt_edges (bb
);
465 /* Edges for statements that sometimes alter flow control. */
466 if (is_ctrl_altering_stmt (last
))
467 make_exit_edges (bb
);
470 /* Finally, if no edges were created above, this is a regular
471 basic block that only needs a fallthru edge. */
472 if (EDGE_COUNT (bb
->succs
) == 0)
473 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
476 /* We do not care about fake edges, so remove any that the CFG
477 builder inserted for completeness. */
478 remove_fake_exit_edges ();
480 /* Clean up the graph and warn for unreachable code. */
485 /* Create edges for control statement at basic block BB. */
488 make_ctrl_stmt_edges (basic_block bb
)
490 tree last
= last_stmt (bb
);
493 switch (TREE_CODE (last
))
496 make_goto_expr_edges (bb
);
500 make_edge (bb
, EXIT_BLOCK_PTR
, 0);
504 make_cond_expr_edges (bb
);
508 make_switch_expr_edges (bb
);
512 make_eh_edges (last
);
513 /* Yet another NORETURN hack. */
514 if (EDGE_COUNT (bb
->succs
) == 0)
515 make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_FAKE
);
524 /* Create exit edges for statements in block BB that alter the flow of
525 control. Statements that alter the control flow are 'goto', 'return'
526 and calls to non-returning functions. */
529 make_exit_edges (basic_block bb
)
531 tree last
= last_stmt (bb
), op
;
534 switch (TREE_CODE (last
))
537 /* If this function receives a nonlocal goto, then we need to
538 make edges from this call site to all the nonlocal goto
540 if (TREE_SIDE_EFFECTS (last
)
541 && current_function_has_nonlocal_label
)
542 make_goto_expr_edges (bb
);
544 /* If this statement has reachable exception handlers, then
545 create abnormal edges to them. */
546 make_eh_edges (last
);
548 /* Some calls are known not to return. For such calls we create
551 We really need to revamp how we build edges so that it's not
552 such a bloody pain to avoid creating edges for this case since
553 all we do is remove these edges when we're done building the
555 if (call_expr_flags (last
) & ECF_NORETURN
)
557 make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_FAKE
);
561 /* Don't forget the fall-thru edge. */
562 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
566 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
567 may have an abnormal edge. Search the RHS for this case and
568 create any required edges. */
569 op
= get_call_expr_in (last
);
570 if (op
&& TREE_SIDE_EFFECTS (op
)
571 && current_function_has_nonlocal_label
)
572 make_goto_expr_edges (bb
);
574 make_eh_edges (last
);
575 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
584 /* Create the edges for a COND_EXPR starting at block BB.
585 At this point, both clauses must contain only simple gotos. */
588 make_cond_expr_edges (basic_block bb
)
590 tree entry
= last_stmt (bb
);
591 basic_block then_bb
, else_bb
;
592 tree then_label
, else_label
;
595 gcc_assert (TREE_CODE (entry
) == COND_EXPR
);
597 /* Entry basic blocks for each component. */
598 then_label
= GOTO_DESTINATION (COND_EXPR_THEN (entry
));
599 else_label
= GOTO_DESTINATION (COND_EXPR_ELSE (entry
));
600 then_bb
= label_to_block (then_label
);
601 else_bb
= label_to_block (else_label
);
603 make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
604 make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
607 /* Hashing routine for EDGE_TO_CASES. */
610 edge_to_cases_hash (const void *p
)
612 edge e
= ((struct edge_to_cases_elt
*)p
)->e
;
614 /* Hash on the edge itself (which is a pointer). */
615 return htab_hash_pointer (e
);
618 /* Equality routine for EDGE_TO_CASES, edges are unique, so testing
619 for equality is just a pointer comparison. */
622 edge_to_cases_eq (const void *p1
, const void *p2
)
624 edge e1
= ((struct edge_to_cases_elt
*)p1
)->e
;
625 edge e2
= ((struct edge_to_cases_elt
*)p2
)->e
;
630 /* Called for each element in the hash table (P) as we delete the
631 edge to cases hash table.
633 Clear all the TREE_CHAINs to prevent problems with copying of
634 SWITCH_EXPRs and structure sharing rules, then free the hash table
638 edge_to_cases_cleanup (void *p
)
640 struct edge_to_cases_elt
*elt
= p
;
643 for (t
= elt
->case_labels
; t
; t
= next
)
645 next
= TREE_CHAIN (t
);
646 TREE_CHAIN (t
) = NULL
;
651 /* Start recording information mapping edges to case labels. */
654 start_recording_case_labels (void)
656 gcc_assert (edge_to_cases
== NULL
);
658 edge_to_cases
= htab_create (37,
661 edge_to_cases_cleanup
);
664 /* Return nonzero if we are recording information for case labels. */
667 recording_case_labels_p (void)
669 return (edge_to_cases
!= NULL
);
672 /* Stop recording information mapping edges to case labels and
673 remove any information we have recorded. */
675 end_recording_case_labels (void)
677 htab_delete (edge_to_cases
);
678 edge_to_cases
= NULL
;
681 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
684 record_switch_edge (edge e
, tree case_label
)
686 struct edge_to_cases_elt
*elt
;
689 /* Build a hash table element so we can see if E is already
691 elt
= xmalloc (sizeof (struct edge_to_cases_elt
));
693 elt
->case_labels
= case_label
;
695 slot
= htab_find_slot (edge_to_cases
, elt
, INSERT
);
699 /* E was not in the hash table. Install E into the hash table. */
704 /* E was already in the hash table. Free ELT as we do not need it
708 /* Get the entry stored in the hash table. */
709 elt
= (struct edge_to_cases_elt
*) *slot
;
711 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
712 TREE_CHAIN (case_label
) = elt
->case_labels
;
713 elt
->case_labels
= case_label
;
717 /* If we are inside a {start,end}_recording_cases block, then return
718 a chain of CASE_LABEL_EXPRs from T which reference E.
720 Otherwise return NULL. */
723 get_cases_for_edge (edge e
, tree t
)
725 struct edge_to_cases_elt elt
, *elt_p
;
730 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
731 chains available. Return NULL so the caller can detect this case. */
732 if (!recording_case_labels_p ())
737 elt
.case_labels
= NULL
;
738 slot
= htab_find_slot (edge_to_cases
, &elt
, NO_INSERT
);
742 elt_p
= (struct edge_to_cases_elt
*)*slot
;
743 return elt_p
->case_labels
;
746 /* If we did not find E in the hash table, then this must be the first
747 time we have been queried for information about E & T. Add all the
748 elements from T to the hash table then perform the query again. */
750 vec
= SWITCH_LABELS (t
);
751 n
= TREE_VEC_LENGTH (vec
);
752 for (i
= 0; i
< n
; i
++)
754 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
755 basic_block label_bb
= label_to_block (lab
);
756 record_switch_edge (find_edge (e
->src
, label_bb
), TREE_VEC_ELT (vec
, i
));
761 /* Create the edges for a SWITCH_EXPR starting at block BB.
762 At this point, the switch body has been lowered and the
763 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
766 make_switch_expr_edges (basic_block bb
)
768 tree entry
= last_stmt (bb
);
772 vec
= SWITCH_LABELS (entry
);
773 n
= TREE_VEC_LENGTH (vec
);
775 for (i
= 0; i
< n
; ++i
)
777 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
778 basic_block label_bb
= label_to_block (lab
);
779 make_edge (bb
, label_bb
, 0);
/* Return the basic block holding label DEST.  */

basic_block
label_to_block (tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
      tree stmt;

      stmt = build1 (LABEL_EXPR, void_type_node, dest);
      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }

  return VARRAY_BB (label_to_block_map, uid);
}
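/* Illustrative sketch only, not part of GCC: the constant-time label -> block
   lookup that label_to_block performs above, reduced to a plain array indexed
   by a label's UID (the role LABEL_DECL_UID plays for label_to_block_map).
   toy_map and toy_label_to_block are hypothetical names.  */
#if 0
struct toy_map
{
  int *block_of_uid;            /* Block index for each label uid.  */
  int n;                        /* Number of uids currently mapped.  */
};

static int
toy_label_to_block (const struct toy_map *map, int uid)
{
  if (uid < 0 || uid >= map->n)
    return -1;                  /* Undefined label.  */
  return map->block_of_uid[uid];
}
#endif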
807 /* Create edges for a goto statement at block BB. */
810 make_goto_expr_edges (basic_block bb
)
813 basic_block target_bb
;
815 block_stmt_iterator last
= bsi_last (bb
);
817 goto_t
= bsi_stmt (last
);
819 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
820 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
821 from a nonlocal goto. */
822 if (TREE_CODE (goto_t
) != GOTO_EXPR
)
824 dest
= error_mark_node
;
829 dest
= GOTO_DESTINATION (goto_t
);
832 /* A GOTO to a local label creates normal edges. */
833 if (simple_goto_p (goto_t
))
835 edge e
= make_edge (bb
, label_to_block (dest
), EDGE_FALLTHRU
);
836 #ifdef USE_MAPPED_LOCATION
837 e
->goto_locus
= EXPR_LOCATION (goto_t
);
839 e
->goto_locus
= EXPR_LOCUS (goto_t
);
845 /* Nothing more to do for nonlocal gotos. */
846 if (TREE_CODE (dest
) == LABEL_DECL
)
849 /* Computed gotos remain. */
852 /* Look for the block starting with the destination label. In the
853 case of a computed goto, make an edge to any label block we find
855 FOR_EACH_BB (target_bb
)
857 block_stmt_iterator bsi
;
859 for (bsi
= bsi_start (target_bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
861 tree target
= bsi_stmt (bsi
);
863 if (TREE_CODE (target
) != LABEL_EXPR
)
867 /* Computed GOTOs. Make an edge to every label block that has
868 been marked as a potential target for a computed goto. */
869 (FORCED_LABEL (LABEL_EXPR_LABEL (target
)) && for_call
== 0)
870 /* Nonlocal GOTO target. Make an edge to every label block
871 that has been marked as a potential target for a nonlocal
873 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target
)) && for_call
== 1))
875 make_edge (bb
, target_bb
, EDGE_ABNORMAL
);
881 /* Degenerate case of computed goto with no labels. */
882 if (!for_call
&& EDGE_COUNT (bb
->succs
) == 0)
883 make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_FAKE
);
887 /*---------------------------------------------------------------------------
889 ---------------------------------------------------------------------------*/
891 /* Remove unreachable blocks and other miscellaneous clean up work. */
894 cleanup_tree_cfg (void)
898 timevar_push (TV_TREE_CLEANUP_CFG
);
900 retval
= cleanup_control_flow ();
901 retval
|= delete_unreachable_blocks ();
903 /* cleanup_forwarder_blocks can redirect edges out of SWITCH_EXPRs,
904 which can get expensive. So we want to enable recording of edge
905 to CASE_LABEL_EXPR mappings around the call to
906 cleanup_forwarder_blocks. */
907 start_recording_case_labels ();
908 retval
|= cleanup_forwarder_blocks ();
909 end_recording_case_labels ();
911 #ifdef ENABLE_CHECKING
914 gcc_assert (!cleanup_control_flow ());
915 gcc_assert (!delete_unreachable_blocks ());
916 gcc_assert (!cleanup_forwarder_blocks ());
920 /* Merging the blocks creates no new opportunities for the other
921 optimizations, so do it here. */
922 retval
|= merge_seq_blocks ();
926 #ifdef ENABLE_CHECKING
929 timevar_pop (TV_TREE_CLEANUP_CFG
);
/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We only run this pass once, running it more than once is probably not
   profitable.  */

/* A map from basic block index to the leading label of that block.  */
942 static tree
*label_for_bb
;
944 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
946 update_eh_label (struct eh_region
*region
)
948 tree old_label
= get_eh_region_tree_label (region
);
952 basic_block bb
= label_to_block (old_label
);
954 /* ??? After optimizing, there may be EH regions with labels
955 that have already been removed from the function body, so
956 there is no basic block for them. */
960 new_label
= label_for_bb
[bb
->index
];
961 set_eh_region_tree_label (region
, new_label
);
965 /* Given LABEL return the first label in the same basic block. */
967 main_block_label (tree label
)
969 basic_block bb
= label_to_block (label
);
971 /* label_to_block possibly inserted undefined label into the chain. */
972 if (!label_for_bb
[bb
->index
])
973 label_for_bb
[bb
->index
] = label
;
974 return label_for_bb
[bb
->index
];
977 /* Cleanup redundant labels. This is a three-step process:
978 1) Find the leading label for each block.
979 2) Redirect all references to labels to the leading labels.
980 3) Cleanup all useless labels. */
983 cleanup_dead_labels (void)
986 label_for_bb
= xcalloc (last_basic_block
, sizeof (tree
));
988 /* Find a suitable label for each block. We use the first user-defined
989 label if there is one, or otherwise just the first label we see. */
992 block_stmt_iterator i
;
994 for (i
= bsi_start (bb
); !bsi_end_p (i
); bsi_next (&i
))
996 tree label
, stmt
= bsi_stmt (i
);
998 if (TREE_CODE (stmt
) != LABEL_EXPR
)
1001 label
= LABEL_EXPR_LABEL (stmt
);
1003 /* If we have not yet seen a label for the current block,
1004 remember this one and see if there are more labels. */
1005 if (! label_for_bb
[bb
->index
])
1007 label_for_bb
[bb
->index
] = label
;
1011 /* If we did see a label for the current block already, but it
1012 is an artificially created label, replace it if the current
1013 label is a user defined label. */
1014 if (! DECL_ARTIFICIAL (label
)
1015 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
]))
1017 label_for_bb
[bb
->index
] = label
;
1023 /* Now redirect all jumps/branches to the selected label.
1024 First do so for each block ending in a control statement. */
1027 tree stmt
= last_stmt (bb
);
1031 switch (TREE_CODE (stmt
))
1035 tree true_branch
, false_branch
;
1037 true_branch
= COND_EXPR_THEN (stmt
);
1038 false_branch
= COND_EXPR_ELSE (stmt
);
1040 GOTO_DESTINATION (true_branch
)
1041 = main_block_label (GOTO_DESTINATION (true_branch
));
1042 GOTO_DESTINATION (false_branch
)
1043 = main_block_label (GOTO_DESTINATION (false_branch
));
1051 tree vec
= SWITCH_LABELS (stmt
);
1052 size_t n
= TREE_VEC_LENGTH (vec
);
1054 /* Replace all destination labels. */
1055 for (i
= 0; i
< n
; ++i
)
1057 tree elt
= TREE_VEC_ELT (vec
, i
);
1058 tree label
= main_block_label (CASE_LABEL (elt
));
1059 CASE_LABEL (elt
) = label
;
1064 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1065 remove them until after we've created the CFG edges. */
1067 if (! computed_goto_p (stmt
))
1069 GOTO_DESTINATION (stmt
)
1070 = main_block_label (GOTO_DESTINATION (stmt
));
1079 for_each_eh_region (update_eh_label
);
1081 /* Finally, purge dead labels. All user-defined labels and labels that
1082 can be the target of non-local gotos are preserved. */
1085 block_stmt_iterator i
;
1086 tree label_for_this_bb
= label_for_bb
[bb
->index
];
1088 if (! label_for_this_bb
)
1091 for (i
= bsi_start (bb
); !bsi_end_p (i
); )
1093 tree label
, stmt
= bsi_stmt (i
);
1095 if (TREE_CODE (stmt
) != LABEL_EXPR
)
1098 label
= LABEL_EXPR_LABEL (stmt
);
1100 if (label
== label_for_this_bb
1101 || ! DECL_ARTIFICIAL (label
)
1102 || DECL_NONLOCAL (label
))
  free (label_for_bb);
}
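/* Illustrative sketch only, not part of GCC: the three-step scheme used by
   cleanup_dead_labels above, over a toy representation.  Every label carries
   the index of its block and an `artificial' flag; gotos carry the index of
   the label they target.  All names (toy_label, toy_pick_leaders,
   toy_redirect) are hypothetical.  */
#if 0
struct toy_label { int block; int artificial; };

/* Step 1: pick one leading label per block, preferring user-defined ones.  */
static void
toy_pick_leaders (const struct toy_label *labels, int n_labels,
                  int *leader, int n_blocks)
{
  int i;

  for (i = 0; i < n_blocks; i++)
    leader[i] = -1;
  for (i = 0; i < n_labels; i++)
    {
      int b = labels[i].block;
      if (leader[b] < 0
          || (labels[leader[b]].artificial && !labels[i].artificial))
        leader[b] = i;
    }
}

/* Step 2: redirect every goto to the leading label of its target block.
   Step 3 (not shown) then deletes artificial labels that are neither
   leaders nor nonlocal targets.  */
static void
toy_redirect (const struct toy_label *labels, const int *leader,
              int *goto_target, int n_gotos)
{
  int i;

  for (i = 0; i < n_gotos; i++)
    goto_target[i] = leader[labels[goto_target[i]].block];
}
#endif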
/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.
   E.g., three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels (void)
{
1124 tree stmt
= last_stmt (bb
);
1125 if (stmt
&& TREE_CODE (stmt
) == SWITCH_EXPR
)
1127 tree labels
= SWITCH_LABELS (stmt
);
1128 int old_size
= TREE_VEC_LENGTH (labels
);
1129 int i
, j
, new_size
= old_size
;
1130 tree default_case
= TREE_VEC_ELT (labels
, old_size
- 1);
1133 /* The default label is always the last case in a switch
1134 statement after gimplification. */
1135 default_label
= CASE_LABEL (default_case
);
1137 /* Look for possible opportunities to merge cases.
1138 Ignore the last element of the label vector because it
1139 must be the default case. */
1141 while (i
< old_size
- 1)
1143 tree base_case
, base_label
, base_high
, type
;
1144 base_case
= TREE_VEC_ELT (labels
, i
);
1146 gcc_assert (base_case
);
1147 base_label
= CASE_LABEL (base_case
);
1149 /* Discard cases that have the same destination as the
1151 if (base_label
== default_label
)
1153 TREE_VEC_ELT (labels
, i
) = NULL_TREE
;
1159 type
= TREE_TYPE (CASE_LOW (base_case
));
1160 base_high
= CASE_HIGH (base_case
) ?
1161 CASE_HIGH (base_case
) : CASE_LOW (base_case
);
1163 /* Try to merge case labels. Break out when we reach the end
1164 of the label vector or when we cannot merge the next case
1165 label with the current one. */
1166 while (i
< old_size
- 1)
1168 tree merge_case
= TREE_VEC_ELT (labels
, i
);
1169 tree merge_label
= CASE_LABEL (merge_case
);
1170 tree t
= int_const_binop (PLUS_EXPR
, base_high
,
1171 integer_one_node
, 1);
1173 /* Merge the cases if they jump to the same place,
1174 and their ranges are consecutive. */
1175 if (merge_label
== base_label
1176 && tree_int_cst_equal (CASE_LOW (merge_case
), t
))
1178 base_high
= CASE_HIGH (merge_case
) ?
1179 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1180 CASE_HIGH (base_case
) = base_high
;
1181 TREE_VEC_ELT (labels
, i
) = NULL_TREE
;
	  /* Compress the case labels in the label vector, and adjust the
	     length of the vector.  */
	  for (i = 0, j = 0; i < new_size; i++)
	    {
	      while (! TREE_VEC_ELT (labels, j))
		j++;
	      TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
	    }
	  TREE_VEC_LENGTH (labels) = new_size;
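/* Illustrative sketch only, not part of GCC: the case-merging step performed
   by group_case_labels above, over a sorted array of toy cases.  Two adjacent
   entries are merged when they jump to the same label and their ranges are
   consecutive, so 1: 2: 3: collapses to 1..3:.  Returns the new number of
   entries.  toy_case and toy_group_cases are hypothetical names.  */
#if 0
struct toy_case { long low, high; int label; };

static int
toy_group_cases (struct toy_case *cases, int n)
{
  int i, out;

  if (n == 0)
    return 0;

  out = 0;
  cases[out] = cases[0];
  for (i = 1; i < n; i++)
    {
      if (cases[i].label == cases[out].label
          && cases[i].low == cases[out].high + 1)
        /* Same destination and consecutive range: extend the base case.  */
        cases[out].high = cases[i].high;
      else
        cases[++out] = cases[i];
    }
  return out + 1;
}
#endif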
1203 /* Checks whether we can merge block B into block A. */
1206 tree_can_merge_blocks_p (basic_block a
, basic_block b
)
1209 block_stmt_iterator bsi
;
1211 if (EDGE_COUNT (a
->succs
) != 1)
1214 if (EDGE_SUCC (a
, 0)->flags
& EDGE_ABNORMAL
)
1217 if (EDGE_SUCC (a
, 0)->dest
!= b
)
1220 if (b
== EXIT_BLOCK_PTR
)
1223 if (EDGE_COUNT (b
->preds
) > 1)
1226 /* If A ends by a statement causing exceptions or something similar, we
1227 cannot merge the blocks. */
1228 stmt
= last_stmt (a
);
1229 if (stmt
&& stmt_ends_bb_p (stmt
))
1232 /* Do not allow a block with only a non-local label to be merged. */
1233 if (stmt
&& TREE_CODE (stmt
) == LABEL_EXPR
1234 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
)))
1237 /* There may be no phi nodes at the start of b. Most of these degenerate
1238 phi nodes should be cleaned up by kill_redundant_phi_nodes. */
1242 /* Do not remove user labels. */
1243 for (bsi
= bsi_start (b
); !bsi_end_p (bsi
); bsi_next (&bsi
))
1245 stmt
= bsi_stmt (bsi
);
1246 if (TREE_CODE (stmt
) != LABEL_EXPR
)
1248 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt
)))
1256 /* Merge block B into block A. */
1259 tree_merge_blocks (basic_block a
, basic_block b
)
1261 block_stmt_iterator bsi
;
1262 tree_stmt_iterator last
;
1265 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1267 /* Ensure that B follows A. */
1268 move_block_after (b
, a
);
1270 gcc_assert (EDGE_SUCC (a
, 0)->flags
& EDGE_FALLTHRU
);
1271 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1273 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1274 for (bsi
= bsi_start (b
); !bsi_end_p (bsi
);)
1276 if (TREE_CODE (bsi_stmt (bsi
)) == LABEL_EXPR
)
1280 set_bb_for_stmt (bsi_stmt (bsi
), a
);
1285 /* Merge the chains. */
1286 last
= tsi_last (a
->stmt_list
);
1287 tsi_link_after (&last
, b
->stmt_list
, TSI_NEW_STMT
);
1288 b
->stmt_list
= NULL
;
/* Walk the function tree removing unnecessary statements.

     * Empty statement nodes are removed

     * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed

     * Unnecessary COND_EXPRs are removed

     * Some unnecessary BIND_EXPRs are removed

   Clearly more work could be done.  The trick is doing the analysis
   and removal fast enough to be a net improvement in compile times.

   Note that when we remove a control structure such as a COND_EXPR,
   BIND_EXPR, or TRY block, we will need to repeat this optimization pass
   to ensure we eliminate all the useless code.  */
1318 static void remove_useless_stmts_1 (tree
*, struct rus_data
*);
1321 remove_useless_stmts_warn_notreached (tree stmt
)
1323 if (EXPR_HAS_LOCATION (stmt
))
1325 location_t loc
= EXPR_LOCATION (stmt
);
1326 if (LOCATION_LINE (loc
) > 0)
1328 warning ("%Hwill never be executed", &loc
);
1333 switch (TREE_CODE (stmt
))
1335 case STATEMENT_LIST
:
1337 tree_stmt_iterator i
;
1338 for (i
= tsi_start (stmt
); !tsi_end_p (i
); tsi_next (&i
))
1339 if (remove_useless_stmts_warn_notreached (tsi_stmt (i
)))
1345 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt
)))
1347 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt
)))
1349 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt
)))
1353 case TRY_FINALLY_EXPR
:
1354 case TRY_CATCH_EXPR
:
1355 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt
, 0)))
1357 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt
, 1)))
1362 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt
));
1363 case EH_FILTER_EXPR
:
1364 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt
));
1366 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt
));
1369 /* Not a live container. */
1377 remove_useless_stmts_cond (tree
*stmt_p
, struct rus_data
*data
)
1379 tree then_clause
, else_clause
, cond
;
1380 bool save_has_label
, then_has_label
, else_has_label
;
1382 save_has_label
= data
->has_label
;
1383 data
->has_label
= false;
1384 data
->last_goto
= NULL
;
1386 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p
), data
);
1388 then_has_label
= data
->has_label
;
1389 data
->has_label
= false;
1390 data
->last_goto
= NULL
;
1392 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p
), data
);
1394 else_has_label
= data
->has_label
;
1395 data
->has_label
= save_has_label
| then_has_label
| else_has_label
;
1397 then_clause
= COND_EXPR_THEN (*stmt_p
);
1398 else_clause
= COND_EXPR_ELSE (*stmt_p
);
1399 cond
= fold (COND_EXPR_COND (*stmt_p
));
1401 /* If neither arm does anything at all, we can remove the whole IF. */
1402 if (!TREE_SIDE_EFFECTS (then_clause
) && !TREE_SIDE_EFFECTS (else_clause
))
1404 *stmt_p
= build_empty_stmt ();
1405 data
->repeat
= true;
1408 /* If there are no reachable statements in an arm, then we can
1409 zap the entire conditional. */
1410 else if (integer_nonzerop (cond
) && !else_has_label
)
1412 if (warn_notreached
)
1413 remove_useless_stmts_warn_notreached (else_clause
);
1414 *stmt_p
= then_clause
;
1415 data
->repeat
= true;
1417 else if (integer_zerop (cond
) && !then_has_label
)
1419 if (warn_notreached
)
1420 remove_useless_stmts_warn_notreached (then_clause
);
1421 *stmt_p
= else_clause
;
1422 data
->repeat
= true;
1425 /* Check a couple of simple things on then/else with single stmts. */
1428 tree then_stmt
= expr_only (then_clause
);
1429 tree else_stmt
= expr_only (else_clause
);
1431 /* Notice branches to a common destination. */
1432 if (then_stmt
&& else_stmt
1433 && TREE_CODE (then_stmt
) == GOTO_EXPR
1434 && TREE_CODE (else_stmt
) == GOTO_EXPR
1435 && (GOTO_DESTINATION (then_stmt
) == GOTO_DESTINATION (else_stmt
)))
1437 *stmt_p
= then_stmt
;
1438 data
->repeat
= true;
1441 /* If the THEN/ELSE clause merely assigns a value to a variable or
1442 parameter which is already known to contain that value, then
1443 remove the useless THEN/ELSE clause. */
1444 else if (TREE_CODE (cond
) == VAR_DECL
|| TREE_CODE (cond
) == PARM_DECL
)
1447 && TREE_CODE (else_stmt
) == MODIFY_EXPR
1448 && TREE_OPERAND (else_stmt
, 0) == cond
1449 && integer_zerop (TREE_OPERAND (else_stmt
, 1)))
1450 COND_EXPR_ELSE (*stmt_p
) = alloc_stmt_list ();
1452 else if ((TREE_CODE (cond
) == EQ_EXPR
|| TREE_CODE (cond
) == NE_EXPR
)
1453 && (TREE_CODE (TREE_OPERAND (cond
, 0)) == VAR_DECL
1454 || TREE_CODE (TREE_OPERAND (cond
, 0)) == PARM_DECL
)
1455 && TREE_CONSTANT (TREE_OPERAND (cond
, 1)))
1457 tree stmt
= (TREE_CODE (cond
) == EQ_EXPR
1458 ? then_stmt
: else_stmt
);
1459 tree
*location
= (TREE_CODE (cond
) == EQ_EXPR
1460 ? &COND_EXPR_THEN (*stmt_p
)
1461 : &COND_EXPR_ELSE (*stmt_p
));
1464 && TREE_CODE (stmt
) == MODIFY_EXPR
1465 && TREE_OPERAND (stmt
, 0) == TREE_OPERAND (cond
, 0)
1466 && TREE_OPERAND (stmt
, 1) == TREE_OPERAND (cond
, 1))
1467 *location
= alloc_stmt_list ();
1471 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1472 would be re-introduced during lowering. */
1473 data
->last_goto
= NULL
;
1478 remove_useless_stmts_tf (tree
*stmt_p
, struct rus_data
*data
)
1480 bool save_may_branch
, save_may_throw
;
1481 bool this_may_branch
, this_may_throw
;
1483 /* Collect may_branch and may_throw information for the body only. */
1484 save_may_branch
= data
->may_branch
;
1485 save_may_throw
= data
->may_throw
;
1486 data
->may_branch
= false;
1487 data
->may_throw
= false;
1488 data
->last_goto
= NULL
;
1490 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 0), data
);
1492 this_may_branch
= data
->may_branch
;
1493 this_may_throw
= data
->may_throw
;
1494 data
->may_branch
|= save_may_branch
;
1495 data
->may_throw
|= save_may_throw
;
1496 data
->last_goto
= NULL
;
1498 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 1), data
);
1500 /* If the body is empty, then we can emit the FINALLY block without
1501 the enclosing TRY_FINALLY_EXPR. */
1502 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p
, 0)))
1504 *stmt_p
= TREE_OPERAND (*stmt_p
, 1);
1505 data
->repeat
= true;
1508 /* If the handler is empty, then we can emit the TRY block without
1509 the enclosing TRY_FINALLY_EXPR. */
1510 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p
, 1)))
1512 *stmt_p
= TREE_OPERAND (*stmt_p
, 0);
1513 data
->repeat
= true;
1516 /* If the body neither throws, nor branches, then we can safely
1517 string the TRY and FINALLY blocks together. */
1518 else if (!this_may_branch
&& !this_may_throw
)
1520 tree stmt
= *stmt_p
;
1521 *stmt_p
= TREE_OPERAND (stmt
, 0);
1522 append_to_statement_list (TREE_OPERAND (stmt
, 1), stmt_p
);
1523 data
->repeat
= true;
1529 remove_useless_stmts_tc (tree
*stmt_p
, struct rus_data
*data
)
1531 bool save_may_throw
, this_may_throw
;
1532 tree_stmt_iterator i
;
1535 /* Collect may_throw information for the body only. */
1536 save_may_throw
= data
->may_throw
;
1537 data
->may_throw
= false;
1538 data
->last_goto
= NULL
;
1540 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 0), data
);
1542 this_may_throw
= data
->may_throw
;
1543 data
->may_throw
= save_may_throw
;
1545 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1546 if (!this_may_throw
)
1548 if (warn_notreached
)
1549 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p
, 1));
1550 *stmt_p
= TREE_OPERAND (*stmt_p
, 0);
1551 data
->repeat
= true;
1555 /* Process the catch clause specially. We may be able to tell that
1556 no exceptions propagate past this point. */
1558 this_may_throw
= true;
1559 i
= tsi_start (TREE_OPERAND (*stmt_p
, 1));
1560 stmt
= tsi_stmt (i
);
1561 data
->last_goto
= NULL
;
1563 switch (TREE_CODE (stmt
))
1566 for (; !tsi_end_p (i
); tsi_next (&i
))
1568 stmt
= tsi_stmt (i
);
1569 /* If we catch all exceptions, then the body does not
1570 propagate exceptions past this point. */
1571 if (CATCH_TYPES (stmt
) == NULL
)
1572 this_may_throw
= false;
1573 data
->last_goto
= NULL
;
1574 remove_useless_stmts_1 (&CATCH_BODY (stmt
), data
);
1578 case EH_FILTER_EXPR
:
1579 if (EH_FILTER_MUST_NOT_THROW (stmt
))
1580 this_may_throw
= false;
1581 else if (EH_FILTER_TYPES (stmt
) == NULL
)
1582 this_may_throw
= false;
1583 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt
), data
);
1587 /* Otherwise this is a cleanup. */
1588 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p
, 1), data
);
1590 /* If the cleanup is empty, then we can emit the TRY block without
1591 the enclosing TRY_CATCH_EXPR. */
1592 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p
, 1)))
1594 *stmt_p
= TREE_OPERAND (*stmt_p
, 0);
1595 data
->repeat
= true;
1599 data
->may_throw
|= this_may_throw
;
1604 remove_useless_stmts_bind (tree
*stmt_p
, struct rus_data
*data
)
1608 /* First remove anything underneath the BIND_EXPR. */
1609 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p
), data
);
1611 /* If the BIND_EXPR has no variables, then we can pull everything
1612 up one level and remove the BIND_EXPR, unless this is the toplevel
1613 BIND_EXPR for the current function or an inlined function.
1615 When this situation occurs we will want to apply this
1616 optimization again. */
1617 block
= BIND_EXPR_BLOCK (*stmt_p
);
1618 if (BIND_EXPR_VARS (*stmt_p
) == NULL_TREE
1619 && *stmt_p
!= DECL_SAVED_TREE (current_function_decl
)
1621 || ! BLOCK_ABSTRACT_ORIGIN (block
)
1622 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block
))
1625 *stmt_p
= BIND_EXPR_BODY (*stmt_p
);
1626 data
->repeat
= true;
1632 remove_useless_stmts_goto (tree
*stmt_p
, struct rus_data
*data
)
1634 tree dest
= GOTO_DESTINATION (*stmt_p
);
1636 data
->may_branch
= true;
1637 data
->last_goto
= NULL
;
1639 /* Record the last goto expr, so that we can delete it if unnecessary. */
1640 if (TREE_CODE (dest
) == LABEL_DECL
)
1641 data
->last_goto
= stmt_p
;
1646 remove_useless_stmts_label (tree
*stmt_p
, struct rus_data
*data
)
1648 tree label
= LABEL_EXPR_LABEL (*stmt_p
);
1650 data
->has_label
= true;
1652 /* We do want to jump across non-local label receiver code. */
1653 if (DECL_NONLOCAL (label
))
1654 data
->last_goto
= NULL
;
1656 else if (data
->last_goto
&& GOTO_DESTINATION (*data
->last_goto
) == label
)
1658 *data
->last_goto
= build_empty_stmt ();
1659 data
->repeat
= true;
1662 /* ??? Add something here to delete unused labels. */
/* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
   decl.  This allows us to eliminate redundant or useless
   calls to "const" functions.

   The gimplifier already does the same operation, but we may notice functions
   being const and pure only after their calls have been gimplified, so we
   need to update the flag.  */
1675 update_call_expr_flags (tree call
)
1677 tree decl
= get_callee_fndecl (call
);
1680 if (call_expr_flags (call
) & (ECF_CONST
| ECF_PURE
))
1681 TREE_SIDE_EFFECTS (call
) = 0;
1682 if (TREE_NOTHROW (decl
))
1683 TREE_NOTHROW (call
) = 1;
1687 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1690 notice_special_calls (tree t
)
1692 int flags
= call_expr_flags (t
);
1694 if (flags
& ECF_MAY_BE_ALLOCA
)
1695 current_function_calls_alloca
= true;
1696 if (flags
& ECF_RETURNS_TWICE
)
1697 current_function_calls_setjmp
= true;
1701 /* Clear flags set by notice_special_calls. Used by dead code removal
1702 to update the flags. */
1705 clear_special_calls (void)
1707 current_function_calls_alloca
= false;
1708 current_function_calls_setjmp
= false;
1713 remove_useless_stmts_1 (tree
*tp
, struct rus_data
*data
)
1717 switch (TREE_CODE (t
))
1720 remove_useless_stmts_cond (tp
, data
);
1723 case TRY_FINALLY_EXPR
:
1724 remove_useless_stmts_tf (tp
, data
);
1727 case TRY_CATCH_EXPR
:
1728 remove_useless_stmts_tc (tp
, data
);
1732 remove_useless_stmts_bind (tp
, data
);
1736 remove_useless_stmts_goto (tp
, data
);
1740 remove_useless_stmts_label (tp
, data
);
1745 data
->last_goto
= NULL
;
1746 data
->may_branch
= true;
1751 data
->last_goto
= NULL
;
1752 notice_special_calls (t
);
1753 update_call_expr_flags (t
);
1754 if (tree_could_throw_p (t
))
1755 data
->may_throw
= true;
1759 data
->last_goto
= NULL
;
1761 op
= get_call_expr_in (t
);
1764 update_call_expr_flags (op
);
1765 notice_special_calls (op
);
1767 if (tree_could_throw_p (t
))
1768 data
->may_throw
= true;
1771 case STATEMENT_LIST
:
1773 tree_stmt_iterator i
= tsi_start (t
);
1774 while (!tsi_end_p (i
))
1777 if (IS_EMPTY_STMT (t
))
1783 remove_useless_stmts_1 (tsi_stmt_ptr (i
), data
);
1786 if (TREE_CODE (t
) == STATEMENT_LIST
)
1788 tsi_link_before (&i
, t
, TSI_SAME_STMT
);
1798 data
->last_goto
= NULL
;
1802 data
->last_goto
= NULL
;
static void
remove_useless_stmts (void)
{
  struct rus_data data;

  clear_special_calls ();

  do
    {
      memset (&data, 0, sizeof (data));
      remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
    }
  while (data.repeat);
}
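/* Illustrative sketch only, not part of GCC: the fixpoint driver used by
   remove_useless_stmts above.  Each sweep over the tree sets `repeat' when it
   deletes a container (an empty COND_EXPR, BIND_EXPR, TRY block, ...), since
   that deletion may expose further useless statements; sweeps run until one
   finishes without changing anything.  The toy_* names are hypothetical.  */
#if 0
struct toy_pass_state { int repeat; };

static void
toy_one_sweep (struct toy_pass_state *st)
{
  /* A real sweep walks the statement tree here and sets st->repeat when it
     removes something; this stub removes nothing, so the driver stops after
     one iteration.  */
  (void) st;
}

static void
toy_remove_useless (void)
{
  struct toy_pass_state st;

  do
    {
      st.repeat = 0;
      toy_one_sweep (&st);
    }
  while (st.repeat);
}
#endif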
1823 struct tree_opt_pass pass_remove_useless_stmts
=
1825 "useless", /* name */
1827 remove_useless_stmts
, /* execute */
1830 0, /* static_pass_number */
1832 PROP_gimple_any
, /* properties_required */
1833 0, /* properties_provided */
1834 0, /* properties_destroyed */
1835 0, /* todo_flags_start */
1836 TODO_dump_func
, /* todo_flags_finish */
1841 /* Remove obviously useless statements in basic block BB. */
1844 cfg_remove_useless_stmts_bb (basic_block bb
)
1846 block_stmt_iterator bsi
;
1847 tree stmt
= NULL_TREE
;
1848 tree cond
, var
= NULL_TREE
, val
= NULL_TREE
;
1849 struct var_ann_d
*ann
;
1851 /* Check whether we come here from a condition, and if so, get the
1853 if (EDGE_COUNT (bb
->preds
) != 1
1854 || !(EDGE_PRED (bb
, 0)->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
1857 cond
= COND_EXPR_COND (last_stmt (EDGE_PRED (bb
, 0)->src
));
1859 if (TREE_CODE (cond
) == VAR_DECL
|| TREE_CODE (cond
) == PARM_DECL
)
1862 val
= (EDGE_PRED (bb
, 0)->flags
& EDGE_FALSE_VALUE
1863 ? boolean_false_node
: boolean_true_node
);
1865 else if (TREE_CODE (cond
) == TRUTH_NOT_EXPR
1866 && (TREE_CODE (TREE_OPERAND (cond
, 0)) == VAR_DECL
1867 || TREE_CODE (TREE_OPERAND (cond
, 0)) == PARM_DECL
))
1869 var
= TREE_OPERAND (cond
, 0);
1870 val
= (EDGE_PRED (bb
, 0)->flags
& EDGE_FALSE_VALUE
1871 ? boolean_true_node
: boolean_false_node
);
1875 if (EDGE_PRED (bb
, 0)->flags
& EDGE_FALSE_VALUE
)
1876 cond
= invert_truthvalue (cond
);
1877 if (TREE_CODE (cond
) == EQ_EXPR
1878 && (TREE_CODE (TREE_OPERAND (cond
, 0)) == VAR_DECL
1879 || TREE_CODE (TREE_OPERAND (cond
, 0)) == PARM_DECL
)
1880 && (TREE_CODE (TREE_OPERAND (cond
, 1)) == VAR_DECL
1881 || TREE_CODE (TREE_OPERAND (cond
, 1)) == PARM_DECL
1882 || TREE_CONSTANT (TREE_OPERAND (cond
, 1))))
1884 var
= TREE_OPERAND (cond
, 0);
1885 val
= TREE_OPERAND (cond
, 1);
1891 /* Only work for normal local variables. */
1892 ann
= var_ann (var
);
1895 || TREE_ADDRESSABLE (var
))
1898 if (! TREE_CONSTANT (val
))
1900 ann
= var_ann (val
);
1903 || TREE_ADDRESSABLE (val
))
  /* Ignore floating point variables, since comparison behaves weird for
     them.  */
  if (FLOAT_TYPE_P (TREE_TYPE (var)))
1912 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
);)
1914 stmt
= bsi_stmt (bsi
);
1916 /* If the THEN/ELSE clause merely assigns a value to a variable/parameter
1917 which is already known to contain that value, then remove the useless
1918 THEN/ELSE clause. */
1919 if (TREE_CODE (stmt
) == MODIFY_EXPR
1920 && TREE_OPERAND (stmt
, 0) == var
1921 && operand_equal_p (val
, TREE_OPERAND (stmt
, 1), 0))
1927 /* Invalidate the var if we encounter something that could modify it.
1928 Likewise for the value it was previously set to. Note that we only
1929 consider values that are either a VAR_DECL or PARM_DECL so we
1930 can test for conflict very simply. */
1931 if (TREE_CODE (stmt
) == ASM_EXPR
1932 || (TREE_CODE (stmt
) == MODIFY_EXPR
1933 && (TREE_OPERAND (stmt
, 0) == var
1934 || TREE_OPERAND (stmt
, 0) == val
)))
1942 /* A CFG-aware version of remove_useless_stmts. */
1945 cfg_remove_useless_stmts (void)
1949 #ifdef ENABLE_CHECKING
1950 verify_flow_info ();
1955 cfg_remove_useless_stmts_bb (bb
);
1960 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1963 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
1967 /* Since this block is no longer reachable, we can just delete all
1968 of its PHI nodes. */
1969 phi
= phi_nodes (bb
);
1972 tree next
= PHI_CHAIN (phi
);
1973 remove_phi_node (phi
, NULL_TREE
, bb
);
1977 /* Remove edges to BB's successors. */
1978 while (EDGE_COUNT (bb
->succs
) > 0)
1979 remove_edge (EDGE_SUCC (bb
, 0));
1983 /* Remove statements of basic block BB. */
1986 remove_bb (basic_block bb
)
1988 block_stmt_iterator i
;
1989 source_locus loc
= 0;
1993 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
1994 if (dump_flags
& TDF_DETAILS
)
1996 dump_bb (bb
, dump_file
, 0);
1997 fprintf (dump_file
, "\n");
2001 /* Remove all the instructions in the block. */
2002 for (i
= bsi_start (bb
); !bsi_end_p (i
);)
2004 tree stmt
= bsi_stmt (i
);
2005 if (TREE_CODE (stmt
) == LABEL_EXPR
2006 && FORCED_LABEL (LABEL_EXPR_LABEL (stmt
)))
2008 basic_block new_bb
= bb
->prev_bb
;
2009 block_stmt_iterator new_bsi
= bsi_start (new_bb
);
2012 bsi_insert_before (&new_bsi
, stmt
, BSI_NEW_STMT
);
2016 release_defs (stmt
);
2018 set_bb_for_stmt (stmt
, NULL
);
2022 /* Don't warn for removed gotos. Gotos are often removed due to
2023 jump threading, thus resulting in bogus warnings. Not great,
2024 since this way we lose warnings for gotos in the original
2025 program that are indeed unreachable. */
2026 if (TREE_CODE (stmt
) != GOTO_EXPR
&& EXPR_HAS_LOCATION (stmt
) && !loc
)
2030 #ifdef USE_MAPPED_LOCATION
2031 t
= EXPR_LOCATION (stmt
);
2033 t
= EXPR_LOCUS (stmt
);
2035 if (t
&& LOCATION_LINE (*t
) > 0)
2040 /* If requested, give a warning that the first statement in the
2041 block is unreachable. We walk statements backwards in the
2042 loop above, so the last statement we process is the first statement
2044 if (warn_notreached
&& loc
)
2045 #ifdef USE_MAPPED_LOCATION
2046 warning ("%Hwill never be executed", &loc
);
2048 warning ("%Hwill never be executed", loc
);
2051 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2054 /* Try to remove superfluous control structures. */
2057 cleanup_control_flow (void)
2060 block_stmt_iterator bsi
;
2061 bool retval
= false;
2066 bsi
= bsi_last (bb
);
2068 if (bsi_end_p (bsi
))
2071 stmt
= bsi_stmt (bsi
);
2072 if (TREE_CODE (stmt
) == COND_EXPR
2073 || TREE_CODE (stmt
) == SWITCH_EXPR
)
2074 retval
|= cleanup_control_expr_graph (bb
, bsi
);
2080 /* Disconnect an unreachable block in the control expression starting
2084 cleanup_control_expr_graph (basic_block bb
, block_stmt_iterator bsi
)
2087 bool retval
= false;
2088 tree expr
= bsi_stmt (bsi
), val
;
2090 if (EDGE_COUNT (bb
->succs
) > 1)
2095 switch (TREE_CODE (expr
))
2098 val
= COND_EXPR_COND (expr
);
2102 val
= SWITCH_COND (expr
);
2103 if (TREE_CODE (val
) != INTEGER_CST
)
2111 taken_edge
= find_taken_edge (bb
, val
);
2115 /* Remove all the edges except the one that is always executed. */
2116 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
2118 if (e
!= taken_edge
)
2120 taken_edge
->probability
+= e
->probability
;
2121 taken_edge
->count
+= e
->count
;
2128 if (taken_edge
->probability
> REG_BR_PROB_BASE
)
2129 taken_edge
->probability
= REG_BR_PROB_BASE
;
2132 taken_edge
= EDGE_SUCC (bb
, 0);
2135 taken_edge
->flags
= EDGE_FALLTHRU
;
2137 /* We removed some paths from the cfg. */
2138 free_dominance_info (CDI_DOMINATORS
);
2144 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2145 predicate VAL, return the edge that will be taken out of the block.
2146 If VAL does not match a unique edge, NULL is returned. */
2149 find_taken_edge (basic_block bb
, tree val
)
2153 stmt
= last_stmt (bb
);
2156 gcc_assert (is_ctrl_stmt (stmt
));
2159 /* If VAL is a predicate of the form N RELOP N, where N is an
2160 SSA_NAME, we can usually determine its truth value. */
2161 if (COMPARISON_CLASS_P (val
))
2164 /* If VAL is not a constant, we can't determine which edge might
2166 if (!really_constant_p (val
))
2169 if (TREE_CODE (stmt
) == COND_EXPR
)
2170 return find_taken_edge_cond_expr (bb
, val
);
2172 if (TREE_CODE (stmt
) == SWITCH_EXPR
)
2173 return find_taken_edge_switch_expr (bb
, val
);
2179 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2180 statement, determine which of the two edges will be taken out of the
2181 block. Return NULL if either edge may be taken. */
2184 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2186 edge true_edge
, false_edge
;
2188 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2190 /* Otherwise, try to determine which branch of the if() will be taken.
2191 If VAL is a constant but it can't be reduced to a 0 or a 1, then
2192 we don't really know which edge will be taken at runtime. This
2193 may happen when comparing addresses (e.g., if (&var1 == 4)). */
2194 if (integer_nonzerop (val
))
2196 else if (integer_zerop (val
))
2203 /* Given a constant value VAL and the entry block BB to a SWITCH_EXPR
2204 statement, determine which edge will be taken out of the block. Return
2205 NULL if any edge may be taken. */
2208 find_taken_edge_switch_expr (basic_block bb
, tree val
)
2210 tree switch_expr
, taken_case
;
2211 basic_block dest_bb
;
2214 if (TREE_CODE (val
) != INTEGER_CST
)
2217 switch_expr
= last_stmt (bb
);
2218 taken_case
= find_case_label_for_value (switch_expr
, val
);
2219 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2221 e
= find_edge (bb
, dest_bb
);
2227 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2228 We can make optimal use here of the fact that the case labels are
2229 sorted: We can do a binary search for a case matching VAL. */
2232 find_case_label_for_value (tree switch_expr
, tree val
)
2234 tree vec
= SWITCH_LABELS (switch_expr
);
2235 size_t low
, high
, n
= TREE_VEC_LENGTH (vec
);
2236 tree default_case
= TREE_VEC_ELT (vec
, n
- 1);
2238 for (low
= -1, high
= n
- 1; high
- low
> 1; )
2240 size_t i
= (high
+ low
) / 2;
2241 tree t
= TREE_VEC_ELT (vec
, i
);
      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
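/* Illustrative sketch only, not part of GCC: the binary search performed by
   find_case_label_for_value above, over a toy array of case ranges sorted by
   their low bound.  Returns the index of the matching case, or -1 to mean
   "take the default case".  toy_case and toy_find_case are hypothetical
   names.  */
#if 0
struct toy_case { long low, high; };

static int
toy_find_case (const struct toy_case *cases, int n, long val)
{
  int low = -1, high = n;

  /* Invariant: cases[low].low <= val < cases[high].low, treating the
     out-of-range indices as -infinity and +infinity sentinels.  */
  while (high - low > 1)
    {
      int mid = (high + low) / 2;
      if (cases[mid].low > val)
        high = mid;
      else
        low = mid;
    }

  if (low >= 0 && val >= cases[low].low && val <= cases[low].high)
    return low;

  return -1;                    /* No match: use the default case.  */
}
#endif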
2270 /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
2271 those alternatives are equal in each of the PHI nodes, then return
2272 true, else return false. */
2275 phi_alternatives_equal (basic_block dest
, edge e1
, edge e2
)
2277 int n1
= e1
->dest_idx
;
2278 int n2
= e2
->dest_idx
;
2281 for (phi
= phi_nodes (dest
); phi
; phi
= PHI_CHAIN (phi
))
2283 tree val1
= PHI_ARG_DEF (phi
, n1
);
2284 tree val2
= PHI_ARG_DEF (phi
, n2
);
2286 gcc_assert (val1
!= NULL_TREE
);
2287 gcc_assert (val2
!= NULL_TREE
);
2289 if (!operand_equal_for_phi_arg_p (val1
, val2
))
2297 /*---------------------------------------------------------------------------
2299 ---------------------------------------------------------------------------*/
2301 /* Dump tree-specific information of block BB to file OUTF. */
2304 tree_dump_bb (basic_block bb
, FILE *outf
, int indent
)
2306 dump_generic_bb (outf
, bb
, indent
, TDF_VOPS
);
2310 /* Dump a basic block on stderr. */
2313 debug_tree_bb (basic_block bb
)
2315 dump_bb (bb
, stderr
, 0);
2319 /* Dump basic block with index N on stderr. */
2322 debug_tree_bb_n (int n
)
2324 debug_tree_bb (BASIC_BLOCK (n
));
2325 return BASIC_BLOCK (n
);
2329 /* Dump the CFG on stderr.
2331 FLAGS are the same used by the tree dumping functions
2332 (see TDF_* in tree.h). */
2335 debug_tree_cfg (int flags
)
2337 dump_tree_cfg (stderr
, flags
);
2341 /* Dump the program showing basic block boundaries on the given FILE.
2343 FLAGS are the same used by the tree dumping functions (see TDF_* in
2347 dump_tree_cfg (FILE *file
, int flags
)
2349 if (flags
& TDF_DETAILS
)
2351 const char *funcname
2352 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
2355 fprintf (file
, ";; Function %s\n\n", funcname
);
2356 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2357 n_basic_blocks
, n_edges
, last_basic_block
);
2359 brief_dump_cfg (file
);
2360 fprintf (file
, "\n");
2363 if (flags
& TDF_STATS
)
2364 dump_cfg_stats (file
);
2366 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2370 /* Dump CFG statistics on FILE. */
2373 dump_cfg_stats (FILE *file
)
2375 static long max_num_merged_labels
= 0;
2376 unsigned long size
, total
= 0;
2379 const char * const fmt_str
= "%-30s%-13s%12s\n";
2380 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2381 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2382 const char *funcname
2383 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
2386 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2388 fprintf (file
, "---------------------------------------------------------\n");
2389 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2390 fprintf (file
, fmt_str
, "", " instances ", "used ");
2391 fprintf (file
, "---------------------------------------------------------\n");
2393 size
= n_basic_blocks
* sizeof (struct basic_block_def
);
2395 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks
,
2396 SCALE (size
), LABEL (size
));
2400 n_edges
+= EDGE_COUNT (bb
->succs
);
2401 size
= n_edges
* sizeof (struct edge_def
);
2403 fprintf (file
, fmt_str_1
, "Edges", n_edges
, SCALE (size
), LABEL (size
));
2405 size
= n_basic_blocks
* sizeof (struct bb_ann_d
);
2407 fprintf (file
, fmt_str_1
, "Basic block annotations", n_basic_blocks
,
2408 SCALE (size
), LABEL (size
));
2410 fprintf (file
, "---------------------------------------------------------\n");
2411 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2413 fprintf (file
, "---------------------------------------------------------\n");
2414 fprintf (file
, "\n");
2416 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2417 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2419 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2420 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2422 fprintf (file
, "\n");
2426 /* Dump CFG statistics on stderr. Keep extern so that it's always
2427 linked in the final executable. */
2430 debug_cfg_stats (void)
2432 dump_cfg_stats (stderr
);
2436 /* Dump the flowgraph to a .vcg FILE. */
2439 tree_cfg2vcg (FILE *file
)
2444 const char *funcname
2445 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
2447 /* Write the file header. */
2448 fprintf (file
, "graph: { title: \"%s\"\n", funcname
);
2449 fprintf (file
, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2450 fprintf (file
, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2452 /* Write blocks and edges. */
2453 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR
->succs
)
2455 fprintf (file
, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2458 if (e
->flags
& EDGE_FAKE
)
2459 fprintf (file
, " linestyle: dotted priority: 10");
2461 fprintf (file
, " linestyle: solid priority: 100");
2463 fprintf (file
, " }\n");
2469 enum tree_code head_code
, end_code
;
2470 const char *head_name
, *end_name
;
2473 tree first
= first_stmt (bb
);
2474 tree last
= last_stmt (bb
);
2478 head_code
= TREE_CODE (first
);
2479 head_name
= tree_code_name
[head_code
];
2480 head_line
= get_lineno (first
);
2483 head_name
= "no-statement";
2487 end_code
= TREE_CODE (last
);
2488 end_name
= tree_code_name
[end_code
];
2489 end_line
= get_lineno (last
);
2492 end_name
= "no-statement";
2494 fprintf (file
, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2495 bb
->index
, bb
->index
, head_name
, head_line
, end_name
,
2498 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2500 if (e
->dest
== EXIT_BLOCK_PTR
)
2501 fprintf (file
, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb
->index
);
2503 fprintf (file
, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb
->index
, e
->dest
->index
);
2505 if (e
->flags
& EDGE_FAKE
)
2506 fprintf (file
, " priority: 10 linestyle: dotted");
2508 fprintf (file
, " priority: 100 linestyle: solid");
2510 fprintf (file
, " }\n");
2513 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
2517 fputs ("}\n\n", file
);
2522 /*---------------------------------------------------------------------------
2523 Miscellaneous helpers
2524 ---------------------------------------------------------------------------*/
2526 /* Return true if T represents a stmt that always transfers control. */
2529 is_ctrl_stmt (tree t
)
2531 return (TREE_CODE (t
) == COND_EXPR
2532 || TREE_CODE (t
) == SWITCH_EXPR
2533 || TREE_CODE (t
) == GOTO_EXPR
2534 || TREE_CODE (t
) == RETURN_EXPR
2535 || TREE_CODE (t
) == RESX_EXPR
);
2539 /* Return true if T is a statement that may alter the flow of control
2540 (e.g., a call to a non-returning function). */
2543 is_ctrl_altering_stmt (tree t
)
2548 call
= get_call_expr_in (t
);
2551 /* A non-pure/const CALL_EXPR alters flow control if the current
2552 function has nonlocal labels. */
2553 if (TREE_SIDE_EFFECTS (call
) && current_function_has_nonlocal_label
)
2556 /* A CALL_EXPR also alters control flow if it does not return. */
2557 if (call_expr_flags (call
) & ECF_NORETURN
)
2561 /* If a statement can throw, it alters control flow. */
2562 return tree_can_throw_internal (t
);
2566 /* Return true if T is a computed goto. */
2569 computed_goto_p (tree t
)
2571 return (TREE_CODE (t
) == GOTO_EXPR
2572 && TREE_CODE (GOTO_DESTINATION (t
)) != LABEL_DECL
);
2576 /* Checks whether EXPR is a simple local goto. */
2579 simple_goto_p (tree expr
)
2581 return (TREE_CODE (expr
) == GOTO_EXPR
2582 && TREE_CODE (GOTO_DESTINATION (expr
)) == LABEL_DECL
);
2586 /* Return true if T should start a new basic block. PREV_T is the
2587 statement preceding T. It is used when T is a label or a case label.
2588 Labels should only start a new basic block if their previous statement
2589 wasn't a label. Otherwise, sequence of labels would generate
2590 unnecessary basic blocks that only contain a single label. */
2593 stmt_starts_bb_p (tree t
, tree prev_t
)
2595 enum tree_code code
;
2600 /* LABEL_EXPRs start a new basic block only if the preceding
2601 statement wasn't a label of the same type. This prevents the
2602 creation of consecutive blocks that have nothing but a single
2604 code
= TREE_CODE (t
);
2605 if (code
== LABEL_EXPR
)
2607 /* Nonlocal and computed GOTO targets always start a new block. */
2608 if (code
== LABEL_EXPR
2609 && (DECL_NONLOCAL (LABEL_EXPR_LABEL (t
))
2610 || FORCED_LABEL (LABEL_EXPR_LABEL (t
))))
2613 if (prev_t
&& TREE_CODE (prev_t
) == code
)
2615 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t
)))
2618 cfg_stats
.num_merged_labels
++;
2629 /* Return true if T should end a basic block. */
2632 stmt_ends_bb_p (tree t
)
2634 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2638 /* Add gotos that used to be represented implicitly in the CFG. */
2641 disband_implicit_edges (void)
2644 block_stmt_iterator last
;
2651 last
= bsi_last (bb
);
2652 stmt
= last_stmt (bb
);
2654 if (stmt
&& TREE_CODE (stmt
) == COND_EXPR
)
2656 /* Remove superfluous gotos from COND_EXPR branches. Moved
2657 from cfg_remove_useless_stmts here since it violates the
2658 invariants for tree--cfg correspondence and thus fits better
2659 here where we do it anyway. */
2660 e
= find_edge (bb
, bb
->next_bb
);
2663 if (e
->flags
& EDGE_TRUE_VALUE
)
2664 COND_EXPR_THEN (stmt
) = build_empty_stmt ();
2665 else if (e
->flags
& EDGE_FALSE_VALUE
)
2666 COND_EXPR_ELSE (stmt
) = build_empty_stmt ();
2669 e
->flags
|= EDGE_FALLTHRU
;
2675 if (stmt
&& TREE_CODE (stmt
) == RETURN_EXPR
)
2677 /* Remove the RETURN_EXPR if we may fall though to the exit
2679 gcc_assert (EDGE_COUNT (bb
->succs
) == 1);
2680 gcc_assert (EDGE_SUCC (bb
, 0)->dest
== EXIT_BLOCK_PTR
);
2682 if (bb
->next_bb
== EXIT_BLOCK_PTR
2683 && !TREE_OPERAND (stmt
, 0))
2686 EDGE_SUCC (bb
, 0)->flags
|= EDGE_FALLTHRU
;
2691 /* There can be no fallthru edge if the last statement is a control
2693 if (stmt
&& is_ctrl_stmt (stmt
))
2696 /* Find a fallthru edge and emit the goto if necessary. */
2697 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2698 if (e
->flags
& EDGE_FALLTHRU
)
2701 if (!e
|| e
->dest
== bb
->next_bb
)
2704 gcc_assert (e
->dest
!= EXIT_BLOCK_PTR
);
2705 label
= tree_block_label (e
->dest
);
2707 stmt
= build1 (GOTO_EXPR
, void_type_node
, label
);
2708 #ifdef USE_MAPPED_LOCATION
2709 SET_EXPR_LOCATION (stmt
, e
->goto_locus
);
2711 SET_EXPR_LOCUS (stmt
, e
->goto_locus
);
2713 bsi_insert_after (&last
, stmt
, BSI_NEW_STMT
);
2714 e
->flags
&= ~EDGE_FALLTHRU
;
2718 /* Remove block annotations and other datastructures. */
2721 delete_tree_cfg_annotations (void)
2724 if (n_basic_blocks
> 0)
2725 free_blocks_annotations ();
2727 label_to_block_map
= NULL
;
2734 /* Return the first statement in basic block BB. */
2737 first_stmt (basic_block bb
)
2739 block_stmt_iterator i
= bsi_start (bb
);
2740 return !bsi_end_p (i
) ? bsi_stmt (i
) : NULL_TREE
;
2744 /* Return the last statement in basic block BB. */
2747 last_stmt (basic_block bb
)
2749 block_stmt_iterator b
= bsi_last (bb
);
2750 return !bsi_end_p (b
) ? bsi_stmt (b
) : NULL_TREE
;
2754 /* Return a pointer to the last statement in block BB. */
2757 last_stmt_ptr (basic_block bb
)
2759 block_stmt_iterator last
= bsi_last (bb
);
2760 return !bsi_end_p (last
) ? bsi_stmt_ptr (last
) : NULL
;
2764 /* Return the last statement of an otherwise empty block. Return NULL
2765 if the block is totally empty, or if it contains more than one
2769 last_and_only_stmt (basic_block bb
)
2771 block_stmt_iterator i
= bsi_last (bb
);
2777 last
= bsi_stmt (i
);
2782 /* Empty statements should no longer appear in the instruction stream.
2783 Everything that might have appeared before should be deleted by
2784 remove_useless_stmts, and the optimizers should just bsi_remove
2785 instead of smashing with build_empty_stmt.
2787 Thus the only thing that should appear here in a block containing
2788 one executable statement is a label. */
2789 prev
= bsi_stmt (i
);
2790 if (TREE_CODE (prev
) == LABEL_EXPR
)
2797 /* Mark BB as the basic block holding statement T. */
2800 set_bb_for_stmt (tree t
, basic_block bb
)
2802 if (TREE_CODE (t
) == PHI_NODE
)
2804 else if (TREE_CODE (t
) == STATEMENT_LIST
)
2806 tree_stmt_iterator i
;
2807 for (i
= tsi_start (t
); !tsi_end_p (i
); tsi_next (&i
))
2808 set_bb_for_stmt (tsi_stmt (i
), bb
);
2812 stmt_ann_t ann
= get_stmt_ann (t
);
2815 /* If the statement is a label, add the label to block-to-labels map
2816 so that we can speed up edge creation for GOTO_EXPRs. */
2817 if (TREE_CODE (t
) == LABEL_EXPR
)
2821 t
= LABEL_EXPR_LABEL (t
);
2822 uid
= LABEL_DECL_UID (t
);
2825 LABEL_DECL_UID (t
) = uid
= cfun
->last_label_uid
++;
2826 if (VARRAY_SIZE (label_to_block_map
) <= (unsigned) uid
)
2827 VARRAY_GROW (label_to_block_map
, 3 * uid
/ 2);
2830 /* We're moving an existing label. Make sure that we've
2831 removed it from the old block. */
2832 gcc_assert (!bb
|| !VARRAY_BB (label_to_block_map
, uid
));
2833 VARRAY_BB (label_to_block_map
, uid
) = bb
;
2838 /* Finds iterator for STMT. */
2840 extern block_stmt_iterator
2841 bsi_for_stmt (tree stmt
)
2843 block_stmt_iterator bsi
;
2845 for (bsi
= bsi_start (bb_for_stmt (stmt
)); !bsi_end_p (bsi
); bsi_next (&bsi
))
2846 if (bsi_stmt (bsi
) == stmt
)
2852 /* Insert statement (or statement list) T before the statement
2853 pointed-to by iterator I. M specifies how to update iterator I
2854 after insertion (see enum bsi_iterator_update). */
2857 bsi_insert_before (block_stmt_iterator
*i
, tree t
, enum bsi_iterator_update m
)
2859 set_bb_for_stmt (t
, i
->bb
);
2860 tsi_link_before (&i
->tsi
, t
, m
);
2865 /* Insert statement (or statement list) T after the statement
2866 pointed-to by iterator I. M specifies how to update iterator I
2867 after insertion (see enum bsi_iterator_update). */
2870 bsi_insert_after (block_stmt_iterator
*i
, tree t
, enum bsi_iterator_update m
)
2872 set_bb_for_stmt (t
, i
->bb
);
2873 tsi_link_after (&i
->tsi
, t
, m
);
2878 /* Remove the statement pointed to by iterator I. The iterator is updated
2879 to the next statement. */
2882 bsi_remove (block_stmt_iterator
*i
)
2884 tree t
= bsi_stmt (*i
);
2885 set_bb_for_stmt (t
, NULL
);
2886 tsi_delink (&i
->tsi
);
2890 /* Move the statement at FROM so it comes right after the statement at TO. */
2893 bsi_move_after (block_stmt_iterator
*from
, block_stmt_iterator
*to
)
2895 tree stmt
= bsi_stmt (*from
);
2897 bsi_insert_after (to
, stmt
, BSI_SAME_STMT
);
2901 /* Move the statement at FROM so it comes right before the statement at TO. */
2904 bsi_move_before (block_stmt_iterator
*from
, block_stmt_iterator
*to
)
2906 tree stmt
= bsi_stmt (*from
);
2908 bsi_insert_before (to
, stmt
, BSI_SAME_STMT
);
2912 /* Move the statement at FROM to the end of basic block BB. */
2915 bsi_move_to_bb_end (block_stmt_iterator
*from
, basic_block bb
)
2917 block_stmt_iterator last
= bsi_last (bb
);
2919 /* Have to check bsi_end_p because it could be an empty block. */
2920 if (!bsi_end_p (last
) && is_ctrl_stmt (bsi_stmt (last
)))
2921 bsi_move_before (from
, &last
);
2923 bsi_move_after (from
, &last
);
2927 /* Replace the contents of the statement pointed to by iterator BSI
2928 with STMT. If PRESERVE_EH_INFO is true, the exception handling
2929 information of the original statement is preserved. */
2932 bsi_replace (const block_stmt_iterator
*bsi
, tree stmt
, bool preserve_eh_info
)
2935 tree orig_stmt
= bsi_stmt (*bsi
);
2937 SET_EXPR_LOCUS (stmt
, EXPR_LOCUS (orig_stmt
));
2938 set_bb_for_stmt (stmt
, bsi
->bb
);
2940 /* Preserve EH region information from the original statement, if
2941 requested by the caller. */
2942 if (preserve_eh_info
)
2944 eh_region
= lookup_stmt_eh_region (orig_stmt
);
2946 add_stmt_to_eh_region (stmt
, eh_region
);
2949 *bsi_stmt_ptr (*bsi
) = stmt
;
2954 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2955 is made to place the statement in an existing basic block, but
2956 sometimes that isn't possible. When it isn't possible, the edge is
2957 split and the statement is added to the new block.
2959 In all cases, the returned *BSI points to the correct location. The
2960 return value is true if insertion should be done after the location,
2961 or false if it should be done before the location. If new basic block
2962 has to be created, it is stored in *NEW_BB. */
2965 tree_find_edge_insert_loc (edge e
, block_stmt_iterator
*bsi
,
2966 basic_block
*new_bb
)
2968 basic_block dest
, src
;
2974 /* If the destination has one predecessor which has no PHI nodes,
2975 insert there. Except for the exit block.
2977 The requirement for no PHI nodes could be relaxed. Basically we
2978 would have to examine the PHIs to prove that none of them used
2979 the value set by the statement we want to insert on E. That
2980 hardly seems worth the effort. */
2981 if (EDGE_COUNT (dest
->preds
) == 1
2982 && ! phi_nodes (dest
)
2983 && dest
!= EXIT_BLOCK_PTR
)
2985 *bsi
= bsi_start (dest
);
2986 if (bsi_end_p (*bsi
))
2989 /* Make sure we insert after any leading labels. */
2990 tmp
= bsi_stmt (*bsi
);
2991 while (TREE_CODE (tmp
) == LABEL_EXPR
)
2994 if (bsi_end_p (*bsi
))
2996 tmp
= bsi_stmt (*bsi
);
2999 if (bsi_end_p (*bsi
))
3001 *bsi
= bsi_last (dest
);
3008 /* If the source has one successor, the edge is not abnormal and
3009 the last statement does not end a basic block, insert there.
3010 Except for the entry block. */
3012 if ((e
->flags
& EDGE_ABNORMAL
) == 0
3013 && EDGE_COUNT (src
->succs
) == 1
3014 && src
!= ENTRY_BLOCK_PTR
)
3016 *bsi
= bsi_last (src
);
3017 if (bsi_end_p (*bsi
))
3020 tmp
= bsi_stmt (*bsi
);
3021 if (!stmt_ends_bb_p (tmp
))
3024 /* Insert code just before returning the value. We may need to decompose
3025 the return in the case it contains non-trivial operand. */
3026 if (TREE_CODE (tmp
) == RETURN_EXPR
)
3028 tree op
= TREE_OPERAND (tmp
, 0);
3029 if (!is_gimple_val (op
))
3031 gcc_assert (TREE_CODE (op
) == MODIFY_EXPR
);
3032 bsi_insert_before (bsi
, op
, BSI_NEW_STMT
);
3033 TREE_OPERAND (tmp
, 0) = TREE_OPERAND (op
, 0);
3040 /* Otherwise, create a new basic block, and split this edge. */
3041 dest
= split_edge (e
);
3044 e
= EDGE_PRED (dest
, 0);
3049 /* This routine will commit all pending edge insertions, creating any new
3050 basic blocks which are necessary. */
3053 bsi_commit_edge_inserts (void)
3059 bsi_commit_one_edge_insert (EDGE_SUCC (ENTRY_BLOCK_PTR
, 0), NULL
);
3062 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3063 bsi_commit_one_edge_insert (e
, NULL
);
3067 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3068 to this block, otherwise set it to NULL. */
3071 bsi_commit_one_edge_insert (edge e
, basic_block
*new_bb
)
3075 if (PENDING_STMT (e
))
3077 block_stmt_iterator bsi
;
3078 tree stmt
= PENDING_STMT (e
);
3080 PENDING_STMT (e
) = NULL_TREE
;
3082 if (tree_find_edge_insert_loc (e
, &bsi
, new_bb
))
3083 bsi_insert_after (&bsi
, stmt
, BSI_NEW_STMT
);
3085 bsi_insert_before (&bsi
, stmt
, BSI_NEW_STMT
);
3090 /* Add STMT to the pending list of edge E. No actual insertion is
3091 made until a call to bsi_commit_edge_inserts () is made. */
3094 bsi_insert_on_edge (edge e
, tree stmt
)
3096 append_to_statement_list (stmt
, &PENDING_STMT (e
));
3099 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3100 block has to be created, it is returned. */
3103 bsi_insert_on_edge_immediate (edge e
, tree stmt
)
3105 block_stmt_iterator bsi
;
3106 basic_block new_bb
= NULL
;
3108 gcc_assert (!PENDING_STMT (e
));
3110 if (tree_find_edge_insert_loc (e
, &bsi
, &new_bb
))
3111 bsi_insert_after (&bsi
, stmt
, BSI_NEW_STMT
);
3113 bsi_insert_before (&bsi
, stmt
, BSI_NEW_STMT
);
3118 /*---------------------------------------------------------------------------
3119 Tree specific functions for CFG manipulation
3120 ---------------------------------------------------------------------------*/
3122 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3125 reinstall_phi_args (edge new_edge
, edge old_edge
)
3129 if (!PENDING_STMT (old_edge
))
3132 for (var
= PENDING_STMT (old_edge
), phi
= phi_nodes (new_edge
->dest
);
3134 var
= TREE_CHAIN (var
), phi
= PHI_CHAIN (phi
))
3136 tree result
= TREE_PURPOSE (var
);
3137 tree arg
= TREE_VALUE (var
);
3139 gcc_assert (result
== PHI_RESULT (phi
));
3141 add_phi_arg (phi
, arg
, new_edge
);
3144 PENDING_STMT (old_edge
) = NULL
;
3147 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3148 Abort on abnormal edges. */
3151 tree_split_edge (edge edge_in
)
3153 basic_block new_bb
, after_bb
, dest
, src
;
3156 /* Abnormal edges cannot be split. */
3157 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
3160 dest
= edge_in
->dest
;
3162 /* Place the new block in the block list. Try to keep the new block
3163 near its "logical" location. This is of most help to humans looking
3164 at debugging dumps. */
3165 if (dest
->prev_bb
&& find_edge (dest
->prev_bb
, dest
))
3166 after_bb
= edge_in
->src
;
3168 after_bb
= dest
->prev_bb
;
3170 new_bb
= create_empty_bb (after_bb
);
3171 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
3172 new_bb
->count
= edge_in
->count
;
3173 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
3174 new_edge
->probability
= REG_BR_PROB_BASE
;
3175 new_edge
->count
= edge_in
->count
;
3177 e
= redirect_edge_and_branch (edge_in
, new_bb
);
3179 reinstall_phi_args (new_edge
, e
);
3185 /* Return true when BB has label LABEL in it. */
3188 has_label_p (basic_block bb
, tree label
)
3190 block_stmt_iterator bsi
;
3192 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
3194 tree stmt
= bsi_stmt (bsi
);
3196 if (TREE_CODE (stmt
) != LABEL_EXPR
)
3198 if (LABEL_EXPR_LABEL (stmt
) == label
)
3205 /* Callback for walk_tree, check that all elements with address taken are
3206 properly noticed as such. */
3209 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
3216 /* Check operand N for being valid GIMPLE and give error MSG if not.
3217 We check for constants explicitly since they are not considered
3218 gimple invariants if they overflowed. */
3219 #define CHECK_OP(N, MSG) \
3220 do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N)) \
3221 && !is_gimple_val (TREE_OPERAND (t, N))) \
3222 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3224 switch (TREE_CODE (t
))
3227 if (SSA_NAME_IN_FREE_LIST (t
))
3229 error ("SSA name in freelist but still referenced");
3235 x
= TREE_OPERAND (t
, 0);
3236 if (TREE_CODE (x
) == BIT_FIELD_REF
3237 && is_gimple_reg (TREE_OPERAND (x
, 0)))
3239 error ("GIMPLE register modified with BIT_FIELD_REF");
3245 /* Skip any references (they will be checked when we recurse down the
3246 tree) and ensure that any variable used as a prefix is marked
3248 for (x
= TREE_OPERAND (t
, 0);
3249 handled_component_p (x
);
3250 x
= TREE_OPERAND (x
, 0))
3253 if (TREE_CODE (x
) != VAR_DECL
&& TREE_CODE (x
) != PARM_DECL
)
3255 if (!TREE_ADDRESSABLE (x
))
3257 error ("address taken, but ADDRESSABLE bit not set");
3263 x
= COND_EXPR_COND (t
);
3264 if (TREE_CODE (TREE_TYPE (x
)) != BOOLEAN_TYPE
)
3266 error ("non-boolean used in condition");
3273 case FIX_TRUNC_EXPR
:
3275 case FIX_FLOOR_EXPR
:
3276 case FIX_ROUND_EXPR
:
3281 case NON_LVALUE_EXPR
:
3282 case TRUTH_NOT_EXPR
:
3283 CHECK_OP (0, "Invalid operand to unary operator");
3290 case ARRAY_RANGE_REF
:
3292 case VIEW_CONVERT_EXPR
:
3293 /* We have a nest of references. Verify that each of the operands
3294 that determine where to reference is either a constant or a variable,
3295 verify that the base is valid, and then show we've already checked
3297 while (handled_component_p (t
))
3299 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
3300 CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
3301 else if (TREE_CODE (t
) == ARRAY_REF
3302 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
3304 CHECK_OP (1, "Invalid array index.");
3305 if (TREE_OPERAND (t
, 2))
3306 CHECK_OP (2, "Invalid array lower bound.");
3307 if (TREE_OPERAND (t
, 3))
3308 CHECK_OP (3, "Invalid array stride.");
3310 else if (TREE_CODE (t
) == BIT_FIELD_REF
)
3312 CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
3313 CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
3316 t
= TREE_OPERAND (t
, 0);
3319 if (!CONSTANT_CLASS_P (t
) && !is_gimple_lvalue (t
))
3321 error ("Invalid reference prefix.");
3333 case UNORDERED_EXPR
:
3344 case TRUNC_DIV_EXPR
:
3346 case FLOOR_DIV_EXPR
:
3347 case ROUND_DIV_EXPR
:
3348 case TRUNC_MOD_EXPR
:
3350 case FLOOR_MOD_EXPR
:
3351 case ROUND_MOD_EXPR
:
3353 case EXACT_DIV_EXPR
:
3363 CHECK_OP (0, "Invalid operand to binary operator");
3364 CHECK_OP (1, "Invalid operand to binary operator");
3376 /* Verify STMT, return true if STMT is not in GIMPLE form.
3377 TODO: Implement type checking. */
3380 verify_stmt (tree stmt
, bool last_in_block
)
3384 if (!is_gimple_stmt (stmt
))
3386 error ("Is not a valid GIMPLE statement.");
3390 addr
= walk_tree (&stmt
, verify_expr
, NULL
, NULL
);
3393 debug_generic_stmt (addr
);
3397 /* If the statement is marked as part of an EH region, then it is
3398 expected that the statement could throw. Verify that when we
3399 have optimizations that simplify statements such that we prove
3400 that they cannot throw, that we update other data structures
3402 if (lookup_stmt_eh_region (stmt
) >= 0)
3404 if (!tree_could_throw_p (stmt
))
3406 error ("Statement marked for throw, but doesn%'t.");
3409 if (!last_in_block
&& tree_can_throw_internal (stmt
))
3411 error ("Statement marked for throw in middle of block.");
3419 debug_generic_stmt (stmt
);
3424 /* Return true when the T can be shared. */
3427 tree_node_can_be_shared (tree t
)
3429 if (IS_TYPE_OR_DECL_P (t
)
3430 /* We check for constants explicitly since they are not considered
3431 gimple invariants if they overflowed. */
3432 || CONSTANT_CLASS_P (t
)
3433 || is_gimple_min_invariant (t
)
3434 || TREE_CODE (t
) == SSA_NAME
3435 || t
== error_mark_node
)
3438 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
3441 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3442 /* We check for constants explicitly since they are not considered
3443 gimple invariants if they overflowed. */
3444 && (CONSTANT_CLASS_P (TREE_OPERAND (t
, 1))
3445 || is_gimple_min_invariant (TREE_OPERAND (t
, 1))))
3446 || (TREE_CODE (t
) == COMPONENT_REF
3447 || TREE_CODE (t
) == REALPART_EXPR
3448 || TREE_CODE (t
) == IMAGPART_EXPR
))
3449 t
= TREE_OPERAND (t
, 0);
3458 /* Called via walk_trees. Verify tree sharing. */
3461 verify_node_sharing (tree
* tp
, int *walk_subtrees
, void *data
)
3463 htab_t htab
= (htab_t
) data
;
3466 if (tree_node_can_be_shared (*tp
))
3468 *walk_subtrees
= false;
3472 slot
= htab_find_slot (htab
, *tp
, INSERT
);
3481 /* Verify the GIMPLE statement chain. */
3487 block_stmt_iterator bsi
;
3492 timevar_push (TV_TREE_STMT_VERIFY
);
3493 htab
= htab_create (37, htab_hash_pointer
, htab_eq_pointer
, NULL
);
3500 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
3502 int phi_num_args
= PHI_NUM_ARGS (phi
);
3504 for (i
= 0; i
< phi_num_args
; i
++)
3506 tree t
= PHI_ARG_DEF (phi
, i
);
3509 /* Addressable variables do have SSA_NAMEs but they
3510 are not considered gimple values. */
3511 if (TREE_CODE (t
) != SSA_NAME
3512 && TREE_CODE (t
) != FUNCTION_DECL
3513 && !is_gimple_val (t
))
3515 error ("PHI def is not a GIMPLE value");
3516 debug_generic_stmt (phi
);
3517 debug_generic_stmt (t
);
3521 addr
= walk_tree (&t
, verify_expr
, NULL
, NULL
);
3524 debug_generic_stmt (addr
);
3528 addr
= walk_tree (&t
, verify_node_sharing
, htab
, NULL
);
3531 error ("Incorrect sharing of tree nodes");
3532 debug_generic_stmt (phi
);
3533 debug_generic_stmt (addr
);
3539 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); )
3541 tree stmt
= bsi_stmt (bsi
);
3543 err
|= verify_stmt (stmt
, bsi_end_p (bsi
));
3544 addr
= walk_tree (&stmt
, verify_node_sharing
, htab
, NULL
);
3547 error ("Incorrect sharing of tree nodes");
3548 debug_generic_stmt (stmt
);
3549 debug_generic_stmt (addr
);
3556 internal_error ("verify_stmts failed.");
3559 timevar_pop (TV_TREE_STMT_VERIFY
);
3563 /* Verifies that the flow information is OK. */
3566 tree_verify_flow_info (void)
3570 block_stmt_iterator bsi
;
3575 if (ENTRY_BLOCK_PTR
->stmt_list
)
3577 error ("ENTRY_BLOCK has a statement list associated with it\n");
3581 if (EXIT_BLOCK_PTR
->stmt_list
)
3583 error ("EXIT_BLOCK has a statement list associated with it\n");
3587 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
3588 if (e
->flags
& EDGE_FALLTHRU
)
3590 error ("Fallthru to exit from bb %d\n", e
->src
->index
);
3596 bool found_ctrl_stmt
= false;
3598 /* Skip labels on the start of basic block. */
3599 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
3601 if (TREE_CODE (bsi_stmt (bsi
)) != LABEL_EXPR
)
3604 if (label_to_block (LABEL_EXPR_LABEL (bsi_stmt (bsi
))) != bb
)
3606 tree stmt
= bsi_stmt (bsi
);
3607 error ("Label %s to block does not match in bb %d\n",
3608 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt
))),
3613 if (decl_function_context (LABEL_EXPR_LABEL (bsi_stmt (bsi
)))
3614 != current_function_decl
)
3616 tree stmt
= bsi_stmt (bsi
);
3617 error ("Label %s has incorrect context in bb %d\n",
3618 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt
))),
3624 /* Verify that body of basic block BB is free of control flow. */
3625 for (; !bsi_end_p (bsi
); bsi_next (&bsi
))
3627 tree stmt
= bsi_stmt (bsi
);
3629 if (found_ctrl_stmt
)
3631 error ("Control flow in the middle of basic block %d\n",
3636 if (stmt_ends_bb_p (stmt
))
3637 found_ctrl_stmt
= true;
3639 if (TREE_CODE (stmt
) == LABEL_EXPR
)
3641 error ("Label %s in the middle of basic block %d\n",
3642 IDENTIFIER_POINTER (DECL_NAME (stmt
)),
3647 bsi
= bsi_last (bb
);
3648 if (bsi_end_p (bsi
))
3651 stmt
= bsi_stmt (bsi
);
3653 if (is_ctrl_stmt (stmt
))
3655 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3656 if (e
->flags
& EDGE_FALLTHRU
)
3658 error ("Fallthru edge after a control statement in bb %d \n",
3664 switch (TREE_CODE (stmt
))
3670 if (TREE_CODE (COND_EXPR_THEN (stmt
)) != GOTO_EXPR
3671 || TREE_CODE (COND_EXPR_ELSE (stmt
)) != GOTO_EXPR
)
3673 error ("Structured COND_EXPR at the end of bb %d\n", bb
->index
);
3677 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
3679 if (!true_edge
|| !false_edge
3680 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
3681 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
3682 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
3683 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
3684 || EDGE_COUNT (bb
->succs
) >= 3)
3686 error ("Wrong outgoing edge flags at end of bb %d\n",
3691 if (!has_label_p (true_edge
->dest
,
3692 GOTO_DESTINATION (COND_EXPR_THEN (stmt
))))
3694 error ("%<then%> label does not match edge at end of bb %d\n",
3699 if (!has_label_p (false_edge
->dest
,
3700 GOTO_DESTINATION (COND_EXPR_ELSE (stmt
))))
3702 error ("%<else%> label does not match edge at end of bb %d\n",
3710 if (simple_goto_p (stmt
))
3712 error ("Explicit goto at end of bb %d\n", bb
->index
);
3717 /* FIXME. We should double check that the labels in the
3718 destination blocks have their address taken. */
3719 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3720 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
3721 | EDGE_FALSE_VALUE
))
3722 || !(e
->flags
& EDGE_ABNORMAL
))
3724 error ("Wrong outgoing edge flags at end of bb %d\n",
3732 if (EDGE_COUNT (bb
->succs
) != 1
3733 || (EDGE_SUCC (bb
, 0)->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
3734 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
3736 error ("Wrong outgoing edge flags at end of bb %d\n", bb
->index
);
3739 if (EDGE_SUCC (bb
, 0)->dest
!= EXIT_BLOCK_PTR
)
3741 error ("Return edge does not point to exit in bb %d\n",
3754 vec
= SWITCH_LABELS (stmt
);
3755 n
= TREE_VEC_LENGTH (vec
);
3757 /* Mark all the destination basic blocks. */
3758 for (i
= 0; i
< n
; ++i
)
3760 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
3761 basic_block label_bb
= label_to_block (lab
);
3763 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
3764 label_bb
->aux
= (void *)1;
3767 /* Verify that the case labels are sorted. */
3768 prev
= TREE_VEC_ELT (vec
, 0);
3769 for (i
= 1; i
< n
- 1; ++i
)
3771 tree c
= TREE_VEC_ELT (vec
, i
);
3774 error ("Found default case not at end of case vector");
3778 if (! tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
3780 error ("Case labels not sorted:\n ");
3781 print_generic_expr (stderr
, prev
, 0);
3782 fprintf (stderr
," is greater than ");
3783 print_generic_expr (stderr
, c
, 0);
3784 fprintf (stderr
," but comes before it.\n");
3789 if (CASE_LOW (TREE_VEC_ELT (vec
, n
- 1)))
3791 error ("No default case found at end of case vector");
3795 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3799 error ("Extra outgoing edge %d->%d\n",
3800 bb
->index
, e
->dest
->index
);
3803 e
->dest
->aux
= (void *)2;
3804 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
3805 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
3807 error ("Wrong outgoing edge flags at end of bb %d\n",
3813 /* Check that we have all of them. */
3814 for (i
= 0; i
< n
; ++i
)
3816 tree lab
= CASE_LABEL (TREE_VEC_ELT (vec
, i
));
3817 basic_block label_bb
= label_to_block (lab
);
3819 if (label_bb
->aux
!= (void *)2)
3821 error ("Missing edge %i->%i",
3822 bb
->index
, label_bb
->index
);
3827 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3828 e
->dest
->aux
= (void *)0;
3835 if (dom_computed
[CDI_DOMINATORS
] >= DOM_NO_FAST_QUERY
)
3836 verify_dominators (CDI_DOMINATORS
);
3842 /* Updates phi nodes after creating a forwarder block joined
3843 by edge FALLTHRU. */
3846 tree_make_forwarder_block (edge fallthru
)
3850 basic_block dummy
, bb
;
3851 tree phi
, new_phi
, var
;
3853 dummy
= fallthru
->src
;
3854 bb
= fallthru
->dest
;
3856 if (EDGE_COUNT (bb
->preds
) == 1)
3859 /* If we redirected a branch we must create new phi nodes at the
3861 for (phi
= phi_nodes (dummy
); phi
; phi
= PHI_CHAIN (phi
))
3863 var
= PHI_RESULT (phi
);
3864 new_phi
= create_phi_node (var
, bb
);
3865 SSA_NAME_DEF_STMT (var
) = new_phi
;
3866 SET_PHI_RESULT (phi
, make_ssa_name (SSA_NAME_VAR (var
), phi
));
3867 add_phi_arg (new_phi
, PHI_RESULT (phi
), fallthru
);
3870 /* Ensure that the PHI node chain is in the same order. */
3871 set_phi_nodes (bb
, phi_reverse (phi_nodes (bb
)));
3873 /* Add the arguments we have stored on edges. */
3874 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
3879 flush_pending_stmts (e
);
3884 /* Return true if basic block BB does nothing except pass control
3885 flow to another block and that we can safely insert a label at
3886 the start of the successor block.
3888 As a precondition, we require that BB be not equal to
3892 tree_forwarder_block_p (basic_block bb
)
3894 block_stmt_iterator bsi
;
3896 /* BB must have a single outgoing edge. */
3897 if (EDGE_COUNT (bb
->succs
) != 1
3898 /* BB can not have any PHI nodes. This could potentially be
3899 relaxed early in compilation if we re-rewrote the variables
3900 appearing in any PHI nodes in forwarder blocks. */
3902 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
3903 || EDGE_SUCC (bb
, 0)->dest
== EXIT_BLOCK_PTR
3904 /* Nor should this be an infinite loop. */
3905 || EDGE_SUCC (bb
, 0)->dest
== bb
3906 /* BB may not have an abnormal outgoing edge. */
3907 || (EDGE_SUCC (bb
, 0)->flags
& EDGE_ABNORMAL
))
3911 gcc_assert (bb
!= ENTRY_BLOCK_PTR
);
3914 /* Now walk through the statements. We can ignore labels, anything else
3915 means this is not a forwarder block. */
3916 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
3918 tree stmt
= bsi_stmt (bsi
);
3920 switch (TREE_CODE (stmt
))
3923 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
)))
3932 if (find_edge (ENTRY_BLOCK_PTR
, bb
))
3938 /* Return true if BB has at least one abnormal incoming edge. */
3941 has_abnormal_incoming_edge_p (basic_block bb
)
3946 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
3947 if (e
->flags
& EDGE_ABNORMAL
)
3953 /* Removes forwarder block BB. Returns false if this failed. If a new
3954 forwarder block is created due to redirection of edges, it is
3955 stored to worklist. */
3958 remove_forwarder_block (basic_block bb
, basic_block
**worklist
)
3960 edge succ
= EDGE_SUCC (bb
, 0), e
, s
;
3961 basic_block dest
= succ
->dest
;
3965 block_stmt_iterator bsi
, bsi_to
;
3966 bool seen_abnormal_edge
= false;
3968 /* We check for infinite loops already in tree_forwarder_block_p.
3969 However it may happen that the infinite loop is created
3970 afterwards due to removal of forwarders. */
3974 /* If the destination block consists of an nonlocal label, do not merge
3976 label
= first_stmt (bb
);
3978 && TREE_CODE (label
) == LABEL_EXPR
3979 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label
)))
3982 /* If there is an abnormal edge to basic block BB, but not into
3983 dest, problems might occur during removal of the phi node at out
3984 of ssa due to overlapping live ranges of registers.
3986 If there is an abnormal edge in DEST, the problems would occur
3987 anyway since cleanup_dead_labels would then merge the labels for
3988 two different eh regions, and rest of exception handling code
3991 So if there is an abnormal edge to BB, proceed only if there is
3992 no abnormal edge to DEST and there are no phi nodes in DEST. */
3993 if (has_abnormal_incoming_edge_p (bb
))
3995 seen_abnormal_edge
= true;
3997 if (has_abnormal_incoming_edge_p (dest
)
3998 || phi_nodes (dest
) != NULL_TREE
)
4002 /* If there are phi nodes in DEST, and some of the blocks that are
4003 predecessors of BB are also predecessors of DEST, check that the
4004 phi node arguments match. */
4005 if (phi_nodes (dest
))
4007 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4009 s
= find_edge (e
->src
, dest
);
4013 if (!phi_alternatives_equal (dest
, succ
, s
))
4018 /* Redirect the edges. */
4019 for (ei
= ei_start (bb
->preds
); (e
= ei_safe_edge (ei
)); )
4021 if (e
->flags
& EDGE_ABNORMAL
)
4023 /* If there is an abnormal edge, redirect it anyway, and
4024 move the labels to the new block to make it legal. */
4025 s
= redirect_edge_succ_nodup (e
, dest
);
4028 s
= redirect_edge_and_branch (e
, dest
);
4032 /* Create arguments for the phi nodes, since the edge was not
4034 for (phi
= phi_nodes (dest
); phi
; phi
= PHI_CHAIN (phi
))
4035 add_phi_arg (phi
, PHI_ARG_DEF (phi
, succ
->dest_idx
), s
);
4039 /* The source basic block might become a forwarder. We know
4040 that it was not a forwarder before, since it used to have
4041 at least two outgoing edges, so we may just add it to
4043 if (tree_forwarder_block_p (s
->src
))
4044 *(*worklist
)++ = s
->src
;
4048 if (seen_abnormal_edge
)
4050 /* Move the labels to the new block, so that the redirection of
4051 the abnormal edges works. */
4053 bsi_to
= bsi_start (dest
);
4054 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); )
4056 label
= bsi_stmt (bsi
);
4057 gcc_assert (TREE_CODE (label
) == LABEL_EXPR
);
4059 bsi_insert_before (&bsi_to
, label
, BSI_CONTINUE_LINKING
);
4063 /* Update the dominators. */
4064 if (dom_info_available_p (CDI_DOMINATORS
))
4066 basic_block dom
, dombb
, domdest
;
4068 dombb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
4069 domdest
= get_immediate_dominator (CDI_DOMINATORS
, dest
);
4072 /* Shortcut to avoid calling (relatively expensive)
4073 nearest_common_dominator unless necessary. */
4077 dom
= nearest_common_dominator (CDI_DOMINATORS
, domdest
, dombb
);
4079 set_immediate_dominator (CDI_DOMINATORS
, dest
, dom
);
4082 /* And kill the forwarder block. */
4083 delete_basic_block (bb
);
4088 /* Removes forwarder blocks. */
4091 cleanup_forwarder_blocks (void)
4094 bool changed
= false;
4095 basic_block
*worklist
= xmalloc (sizeof (basic_block
) * n_basic_blocks
);
4096 basic_block
*current
= worklist
;
4100 if (tree_forwarder_block_p (bb
))
4104 while (current
!= worklist
)
4107 changed
|= remove_forwarder_block (bb
, ¤t
);
4114 /* Return a non-special label in the head of basic block BLOCK.
4115 Create one if it doesn't exist. */
4118 tree_block_label (basic_block bb
)
4120 block_stmt_iterator i
, s
= bsi_start (bb
);
4124 for (i
= s
; !bsi_end_p (i
); first
= false, bsi_next (&i
))
4126 stmt
= bsi_stmt (i
);
4127 if (TREE_CODE (stmt
) != LABEL_EXPR
)
4129 label
= LABEL_EXPR_LABEL (stmt
);
4130 if (!DECL_NONLOCAL (label
))
4133 bsi_move_before (&i
, &s
);
4138 label
= create_artificial_label ();
4139 stmt
= build1 (LABEL_EXPR
, void_type_node
, label
);
4140 bsi_insert_before (&s
, stmt
, BSI_NEW_STMT
);
4145 /* Attempt to perform edge redirection by replacing a possibly complex
4146 jump instruction by a goto or by removing the jump completely.
4147 This can apply only if all edges now point to the same block. The
4148 parameters and return values are equivalent to
4149 redirect_edge_and_branch. */
4152 tree_try_redirect_by_replacing_jump (edge e
, basic_block target
)
4154 basic_block src
= e
->src
;
4155 block_stmt_iterator b
;
4158 /* We can replace or remove a complex jump only when we have exactly
4160 if (EDGE_COUNT (src
->succs
) != 2
4161 /* Verify that all targets will be TARGET. Specifically, the
4162 edge that is not E must also go to TARGET. */
4163 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
4169 stmt
= bsi_stmt (b
);
4171 if (TREE_CODE (stmt
) == COND_EXPR
4172 || TREE_CODE (stmt
) == SWITCH_EXPR
)
4175 e
= ssa_redirect_edge (e
, target
);
4176 e
->flags
= EDGE_FALLTHRU
;
4184 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4185 edge representing the redirected branch. */
4188 tree_redirect_edge_and_branch (edge e
, basic_block dest
)
4190 basic_block bb
= e
->src
;
4191 block_stmt_iterator bsi
;
4195 if (e
->flags
& (EDGE_ABNORMAL_CALL
| EDGE_EH
))
4198 if (e
->src
!= ENTRY_BLOCK_PTR
4199 && (ret
= tree_try_redirect_by_replacing_jump (e
, dest
)))
4202 if (e
->dest
== dest
)
4205 label
= tree_block_label (dest
);
4207 bsi
= bsi_last (bb
);
4208 stmt
= bsi_end_p (bsi
) ? NULL
: bsi_stmt (bsi
);
4210 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
4213 stmt
= (e
->flags
& EDGE_TRUE_VALUE
4214 ? COND_EXPR_THEN (stmt
)
4215 : COND_EXPR_ELSE (stmt
));
4216 GOTO_DESTINATION (stmt
) = label
;
4220 /* No non-abnormal edges should lead from a non-simple goto, and
4221 simple ones should be represented implicitly. */
4226 tree cases
= get_cases_for_edge (e
, stmt
);
4228 /* If we have a list of cases associated with E, then use it
4229 as it's a lot faster than walking the entire case vector. */
4232 edge e2
= find_edge (e
->src
, dest
);
4239 CASE_LABEL (cases
) = label
;
4240 cases
= TREE_CHAIN (cases
);
4243 /* If there was already an edge in the CFG, then we need
4244 to move all the cases associated with E to E2. */
4247 tree cases2
= get_cases_for_edge (e2
, stmt
);
4249 TREE_CHAIN (last
) = TREE_CHAIN (cases2
);
4250 TREE_CHAIN (cases2
) = first
;
4255 tree vec
= SWITCH_LABELS (stmt
);
4256 size_t i
, n
= TREE_VEC_LENGTH (vec
);
4258 for (i
= 0; i
< n
; i
++)
4260 tree elt
= TREE_VEC_ELT (vec
, i
);
4262 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
4263 CASE_LABEL (elt
) = label
;
4272 e
->flags
|= EDGE_FALLTHRU
;
4276 /* Otherwise it must be a fallthru edge, and we don't need to
4277 do anything besides redirecting it. */
4278 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
4282 /* Update/insert PHI nodes as necessary. */
4284 /* Now update the edges in the CFG. */
4285 e
= ssa_redirect_edge (e
, dest
);
4291 /* Simple wrapper, as we can always redirect fallthru edges. */
4294 tree_redirect_edge_and_branch_force (edge e
, basic_block dest
)
4296 e
= tree_redirect_edge_and_branch (e
, dest
);
4303 /* Splits basic block BB after statement STMT (but at least after the
4304 labels). If STMT is NULL, BB is split just after the labels. */
4307 tree_split_block (basic_block bb
, void *stmt
)
4309 block_stmt_iterator bsi
, bsi_tgt
;
4315 new_bb
= create_empty_bb (bb
);
4317 /* Redirect the outgoing edges. */
4318 new_bb
->succs
= bb
->succs
;
4320 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
4323 if (stmt
&& TREE_CODE ((tree
) stmt
) == LABEL_EXPR
)
4326 /* Move everything from BSI to the new basic block. */
4327 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
4329 act
= bsi_stmt (bsi
);
4330 if (TREE_CODE (act
) == LABEL_EXPR
)
4343 bsi_tgt
= bsi_start (new_bb
);
4344 while (!bsi_end_p (bsi
))
4346 act
= bsi_stmt (bsi
);
4348 bsi_insert_after (&bsi_tgt
, act
, BSI_NEW_STMT
);
4355 /* Moves basic block BB after block AFTER. */
4358 tree_move_block_after (basic_block bb
, basic_block after
)
4360 if (bb
->prev_bb
== after
)
4364 link_block (bb
, after
);
4370 /* Return true if basic_block can be duplicated. */
4373 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED
)
4378 /* Create a duplicate of the basic block BB. NOTE: This does not
4379 preserve SSA form. */
4382 tree_duplicate_bb (basic_block bb
)
4385 block_stmt_iterator bsi
, bsi_tgt
;
4387 ssa_op_iter op_iter
;
4389 new_bb
= create_empty_bb (EXIT_BLOCK_PTR
->prev_bb
);
4391 /* First copy the phi nodes. We do not copy phi node arguments here,
4392 since the edges are not ready yet. Keep the chain of phi nodes in
4393 the same order, so that we can add them later. */
4394 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
4396 mark_for_rewrite (PHI_RESULT (phi
));
4397 create_phi_node (PHI_RESULT (phi
), new_bb
);
4399 set_phi_nodes (new_bb
, phi_reverse (phi_nodes (new_bb
)));
4401 bsi_tgt
= bsi_start (new_bb
);
4402 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
4404 tree stmt
= bsi_stmt (bsi
);
4407 if (TREE_CODE (stmt
) == LABEL_EXPR
)
4410 /* Record the definitions. */
4411 get_stmt_operands (stmt
);
4413 FOR_EACH_SSA_TREE_OPERAND (val
, stmt
, op_iter
, SSA_OP_ALL_DEFS
)
4414 mark_for_rewrite (val
);
4416 copy
= unshare_expr (stmt
);
4418 /* Copy also the virtual operands. */
4419 get_stmt_ann (copy
);
4420 copy_virtual_operands (copy
, stmt
);
4422 bsi_insert_after (&bsi_tgt
, copy
, BSI_NEW_STMT
);
4428 /* Basic block BB_COPY was created by code duplication. Add phi node
4429 arguments for edges going out of BB_COPY. The blocks that were
4430 duplicated have rbi->duplicated set to one. */
4433 add_phi_args_after_copy_bb (basic_block bb_copy
)
4435 basic_block bb
, dest
;
4438 tree phi
, phi_copy
, phi_next
, def
;
4440 bb
= bb_copy
->rbi
->original
;
4442 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
4444 if (!phi_nodes (e_copy
->dest
))
4447 if (e_copy
->dest
->rbi
->duplicated
)
4448 dest
= e_copy
->dest
->rbi
->original
;
4450 dest
= e_copy
->dest
;
4452 e
= find_edge (bb
, dest
);
4455 /* During loop unrolling the target of the latch edge is copied.
4456 In this case we are not looking for edge to dest, but to
4457 duplicated block whose original was dest. */
4458 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4459 if (e
->dest
->rbi
->duplicated
4460 && e
->dest
->rbi
->original
== dest
)
4463 gcc_assert (e
!= NULL
);
4466 for (phi
= phi_nodes (e
->dest
), phi_copy
= phi_nodes (e_copy
->dest
);
4468 phi
= phi_next
, phi_copy
= PHI_CHAIN (phi_copy
))
4470 phi_next
= PHI_CHAIN (phi
);
4472 gcc_assert (PHI_RESULT (phi
) == PHI_RESULT (phi_copy
));
4473 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
4474 add_phi_arg (phi_copy
, def
, e_copy
);
4479 /* Blocks in REGION_COPY array of length N_REGION were created by
4480 duplication of basic blocks. Add phi node arguments for edges
4481 going from these blocks. */
4484 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
)
4488 for (i
= 0; i
< n_region
; i
++)
4489 region_copy
[i
]->rbi
->duplicated
= 1;
4491 for (i
= 0; i
< n_region
; i
++)
4492 add_phi_args_after_copy_bb (region_copy
[i
]);
4494 for (i
= 0; i
< n_region
; i
++)
4495 region_copy
[i
]->rbi
->duplicated
= 0;
4498 /* Maps the old ssa name FROM_NAME to TO_NAME. */
4500 struct ssa_name_map_entry
4506 /* Hash function for ssa_name_map_entry. */
4509 ssa_name_map_entry_hash (const void *entry
)
4511 const struct ssa_name_map_entry
*en
= entry
;
4512 return SSA_NAME_VERSION (en
->from_name
);
4515 /* Equality function for ssa_name_map_entry. */
4518 ssa_name_map_entry_eq (const void *in_table
, const void *ssa_name
)
4520 const struct ssa_name_map_entry
*en
= in_table
;
4522 return en
->from_name
== ssa_name
;
4525 /* Allocate duplicates of ssa names in list DEFINITIONS and store the mapping
4529 allocate_ssa_names (bitmap definitions
, htab_t
*map
)
4532 struct ssa_name_map_entry
*entry
;
4538 *map
= htab_create (10, ssa_name_map_entry_hash
,
4539 ssa_name_map_entry_eq
, free
);
4540 EXECUTE_IF_SET_IN_BITMAP (definitions
, 0, ver
, bi
)
4542 name
= ssa_name (ver
);
4543 slot
= htab_find_slot_with_hash (*map
, name
, SSA_NAME_VERSION (name
),
4549 entry
= xmalloc (sizeof (struct ssa_name_map_entry
));
4550 entry
->from_name
= name
;
4553 entry
->to_name
= duplicate_ssa_name (name
, SSA_NAME_DEF_STMT (name
));
4557 /* Rewrite the definition DEF in statement STMT to new ssa name as specified
4558 by the mapping MAP. */
4561 rewrite_to_new_ssa_names_def (def_operand_p def
, tree stmt
, htab_t map
)
4563 tree name
= DEF_FROM_PTR (def
);
4564 struct ssa_name_map_entry
*entry
;
4566 gcc_assert (TREE_CODE (name
) == SSA_NAME
);
4568 entry
= htab_find_with_hash (map
, name
, SSA_NAME_VERSION (name
));
4572 SET_DEF (def
, entry
->to_name
);
4573 SSA_NAME_DEF_STMT (entry
->to_name
) = stmt
;
4576 /* Rewrite the USE to new ssa name as specified by the mapping MAP. */
4579 rewrite_to_new_ssa_names_use (use_operand_p use
, htab_t map
)
4581 tree name
= USE_FROM_PTR (use
);
4582 struct ssa_name_map_entry
*entry
;
4584 if (TREE_CODE (name
) != SSA_NAME
)
4587 entry
= htab_find_with_hash (map
, name
, SSA_NAME_VERSION (name
));
4591 SET_USE (use
, entry
->to_name
);
4594 /* Rewrite the ssa names in basic block BB to new ones as specified by the
4598 rewrite_to_new_ssa_names_bb (basic_block bb
, htab_t map
)
4604 block_stmt_iterator bsi
;
4608 v_may_def_optype v_may_defs
;
4609 v_must_def_optype v_must_defs
;
4612 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4613 if (e
->flags
& EDGE_ABNORMAL
)
4616 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
4618 rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi
), phi
, map
);
4620 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)) = 1;
4623 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
4625 stmt
= bsi_stmt (bsi
);
4626 get_stmt_operands (stmt
);
4627 ann
= stmt_ann (stmt
);
4629 uses
= USE_OPS (ann
);
4630 for (i
= 0; i
< NUM_USES (uses
); i
++)
4631 rewrite_to_new_ssa_names_use (USE_OP_PTR (uses
, i
), map
);
4633 defs
= DEF_OPS (ann
);
4634 for (i
= 0; i
< NUM_DEFS (defs
); i
++)
4635 rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs
, i
), stmt
, map
);
4637 vuses
= VUSE_OPS (ann
);
4638 for (i
= 0; i
< NUM_VUSES (vuses
); i
++)
4639 rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses
, i
), map
);
4641 v_may_defs
= V_MAY_DEF_OPS (ann
);
4642 for (i
= 0; i
< NUM_V_MAY_DEFS (v_may_defs
); i
++)
4644 rewrite_to_new_ssa_names_use
4645 (V_MAY_DEF_OP_PTR (v_may_defs
, i
), map
);
4646 rewrite_to_new_ssa_names_def
4647 (V_MAY_DEF_RESULT_PTR (v_may_defs
, i
), stmt
, map
);
4650 v_must_defs
= V_MUST_DEF_OPS (ann
);
4651 for (i
= 0; i
< NUM_V_MUST_DEFS (v_must_defs
); i
++)
4653 rewrite_to_new_ssa_names_def
4654 (V_MUST_DEF_RESULT_PTR (v_must_defs
, i
), stmt
, map
);
4655 rewrite_to_new_ssa_names_use
4656 (V_MUST_DEF_KILL_PTR (v_must_defs
, i
), map
);
4660 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4661 for (phi
= phi_nodes (e
->dest
); phi
; phi
= PHI_CHAIN (phi
))
4663 rewrite_to_new_ssa_names_use
4664 (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, e
), map
);
4666 if (e
->flags
& EDGE_ABNORMAL
)
4668 tree op
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
4669 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op
) = 1;
4674 /* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
4675 by the mapping MAP. */
4678 rewrite_to_new_ssa_names (basic_block
*region
, unsigned n_region
, htab_t map
)
4682 for (r
= 0; r
< n_region
; r
++)
4683 rewrite_to_new_ssa_names_bb (region
[r
], map
);
4686 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4687 important exit edge EXIT. By important we mean that no SSA name defined
4688 inside region is live over the other exit edges of the region. All entry
4689 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4690 to the duplicate of the region. SSA form, dominance and loop information
4691 is updated. The new basic blocks are stored to REGION_COPY in the same
4692 order as they had in REGION, provided that REGION_COPY is not NULL.
4693 The function returns false if it is unable to copy the region,
4697 tree_duplicate_sese_region (edge entry
, edge exit
,
4698 basic_block
*region
, unsigned n_region
,
4699 basic_block
*region_copy
)
4701 unsigned i
, n_doms
, ver
;
4702 bool free_region_copy
= false, copying_header
= false;
4703 struct loop
*loop
= entry
->dest
->loop_father
;
4708 htab_t ssa_name_map
= NULL
;
4712 if (!can_copy_bbs_p (region
, n_region
))
4715 /* Some sanity checking. Note that we do not check for all possible
4716 missuses of the functions. I.e. if you ask to copy something weird,
4717 it will work, but the state of structures probably will not be
4720 for (i
= 0; i
< n_region
; i
++)
4722 /* We do not handle subloops, i.e. all the blocks must belong to the
4724 if (region
[i
]->loop_father
!= loop
)
4727 if (region
[i
] != entry
->dest
4728 && region
[i
] == loop
->header
)
4734 /* In case the function is used for loop header copying (which is the primary
4735 use), ensure that EXIT and its copy will be new latch and entry edges. */
4736 if (loop
->header
== entry
->dest
)
4738 copying_header
= true;
4739 loop
->copy
= loop
->outer
;
4741 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
4744 for (i
= 0; i
< n_region
; i
++)
4745 if (region
[i
] != exit
->src
4746 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
4752 region_copy
= xmalloc (sizeof (basic_block
) * n_region
);
4753 free_region_copy
= true;
4756 gcc_assert (!any_marked_for_rewrite_p ());
4758 /* Record blocks outside the region that are duplicated by something
4760 doms
= xmalloc (sizeof (basic_block
) * n_basic_blocks
);
4761 n_doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
, doms
);
4763 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
);
4764 definitions
= marked_ssa_names ();
4768 loop
->header
= exit
->dest
;
4769 loop
->latch
= exit
->src
;
4772 /* Redirect the entry and add the phi node arguments. */
4773 redirected
= redirect_edge_and_branch (entry
, entry
->dest
->rbi
->copy
);
4774 gcc_assert (redirected
!= NULL
);
4775 flush_pending_stmts (entry
);
4777 /* Concerning updating of dominators: We must recount dominators
4778 for entry block and its copy. Anything that is outside of the region, but
4779 was dominated by something inside needs recounting as well. */
4780 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
4781 doms
[n_doms
++] = entry
->dest
->rbi
->original
;
4782 iterate_fix_dominators (CDI_DOMINATORS
, doms
, n_doms
);
4785 /* Add the other phi node arguments. */
4786 add_phi_args_after_copy (region_copy
, n_region
);
4788 /* Add phi nodes for definitions at exit. TODO -- once we have immediate
4789 uses, it should be possible to emit phi nodes just for definitions that
4790 are used outside region. */
4791 EXECUTE_IF_SET_IN_BITMAP (definitions
, 0, ver
, bi
)
4793 tree name
= ssa_name (ver
);
4795 phi
= create_phi_node (name
, exit
->dest
);
4796 add_phi_arg (phi
, name
, exit
);
4797 add_phi_arg (phi
, name
, exit_copy
);
4799 SSA_NAME_DEF_STMT (name
) = phi
;
4802 /* And create new definitions inside region and its copy. TODO -- once we
4803 have immediate uses, it might be better to leave definitions in region
4804 unchanged, create new ssa names for phi nodes on exit, and rewrite
4805 the uses, to avoid changing the copied region. */
4806 allocate_ssa_names (definitions
, &ssa_name_map
);
4807 rewrite_to_new_ssa_names (region
, n_region
, ssa_name_map
);
4808 allocate_ssa_names (definitions
, &ssa_name_map
);
4809 rewrite_to_new_ssa_names (region_copy
, n_region
, ssa_name_map
);
4810 htab_delete (ssa_name_map
);
4812 if (free_region_copy
)
4815 unmark_all_for_rewrite ();
4816 BITMAP_XFREE (definitions
);
4821 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
4824 dump_function_to_file (tree fn
, FILE *file
, int flags
)
4826 tree arg
, vars
, var
;
4827 bool ignore_topmost_bind
= false, any_var
= false;
4831 fprintf (file
, "%s (", lang_hooks
.decl_printable_name (fn
, 2));
4833 arg
= DECL_ARGUMENTS (fn
);
4836 print_generic_expr (file
, arg
, dump_flags
);
4837 if (TREE_CHAIN (arg
))
4838 fprintf (file
, ", ");
4839 arg
= TREE_CHAIN (arg
);
4841 fprintf (file
, ")\n");
4843 if (flags
& TDF_RAW
)
4845 dump_node (fn
, TDF_SLIM
| flags
, file
);
4849 /* When GIMPLE is lowered, the variables are no longer available in
4850 BIND_EXPRs, so display them separately. */
4851 if (cfun
&& cfun
->unexpanded_var_list
)
4853 ignore_topmost_bind
= true;
4855 fprintf (file
, "{\n");
4856 for (vars
= cfun
->unexpanded_var_list
; vars
; vars
= TREE_CHAIN (vars
))
4858 var
= TREE_VALUE (vars
);
4860 print_generic_decl (file
, var
, flags
);
4861 fprintf (file
, "\n");
4867 if (basic_block_info
)
4869 /* Make a CFG based dump. */
4870 check_bb_profile (ENTRY_BLOCK_PTR
, file
);
4871 if (!ignore_topmost_bind
)
4872 fprintf (file
, "{\n");
4874 if (any_var
&& n_basic_blocks
)
4875 fprintf (file
, "\n");
4878 dump_generic_bb (file
, bb
, 2, flags
);
4880 fprintf (file
, "}\n");
4881 check_bb_profile (EXIT_BLOCK_PTR
, file
);
4887 /* Make a tree based dump. */
4888 chain
= DECL_SAVED_TREE (fn
);
4890 if (TREE_CODE (chain
) == BIND_EXPR
)
4892 if (ignore_topmost_bind
)
4894 chain
= BIND_EXPR_BODY (chain
);
4902 if (!ignore_topmost_bind
)
4903 fprintf (file
, "{\n");
4908 fprintf (file
, "\n");
4910 print_generic_stmt_indented (file
, chain
, flags
, indent
);
4911 if (ignore_topmost_bind
)
4912 fprintf (file
, "}\n");
4915 fprintf (file
, "\n\n");
4919 /* Pretty print of the loops intermediate representation. */
4920 static void print_loop (FILE *, struct loop
*, int);
4921 static void print_pred_bbs (FILE *, basic_block bb
);
4922 static void print_succ_bbs (FILE *, basic_block bb
);
4925 /* Print the predecessors indexes of edge E on FILE. */
4928 print_pred_bbs (FILE *file
, basic_block bb
)
4933 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4934 fprintf (file
, "bb_%d", e
->src
->index
);
4938 /* Print the successors indexes of edge E on FILE. */
4941 print_succ_bbs (FILE *file
, basic_block bb
)
4946 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4947 fprintf (file
, "bb_%d", e
->src
->index
);
4951 /* Pretty print LOOP on FILE, indented INDENT spaces. */
4954 print_loop (FILE *file
, struct loop
*loop
, int indent
)
4962 s_indent
= (char *) alloca ((size_t) indent
+ 1);
4963 memset ((void *) s_indent
, ' ', (size_t) indent
);
4964 s_indent
[indent
] = '\0';
4966 /* Print the loop's header. */
4967 fprintf (file
, "%sloop_%d\n", s_indent
, loop
->num
);
4969 /* Print the loop's body. */
4970 fprintf (file
, "%s{\n", s_indent
);
4972 if (bb
->loop_father
== loop
)
4974 /* Print the basic_block's header. */
4975 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
4976 print_pred_bbs (file
, bb
);
4977 fprintf (file
, "}, succs = {");
4978 print_succ_bbs (file
, bb
);
4979 fprintf (file
, "})\n");
4981 /* Print the basic_block's body. */
4982 fprintf (file
, "%s {\n", s_indent
);
4983 tree_dump_bb (bb
, file
, indent
+ 4);
4984 fprintf (file
, "%s }\n", s_indent
);
4987 print_loop (file
, loop
->inner
, indent
+ 2);
4988 fprintf (file
, "%s}\n", s_indent
);
4989 print_loop (file
, loop
->next
, indent
);
4993 /* Follow a CFG edge from the entry point of the program, and on entry
4994 of a loop, pretty print the loop structure on FILE. */
4997 print_loop_ir (FILE *file
)
5001 bb
= BASIC_BLOCK (0);
5002 if (bb
&& bb
->loop_father
)
5003 print_loop (file
, bb
->loop_father
, 0);
5007 /* Debugging loops structure at tree level. */
5010 debug_loop_ir (void)
5012 print_loop_ir (stderr
);

/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call.  Return false,
   otherwise.  */

static bool
tree_block_ends_with_call_p (basic_block bb)
{
  block_stmt_iterator bsi = bsi_last (bb);
  return get_call_expr_in (bsi_stmt (bsi)) != NULL;
}


/* Return true if BB ends with a conditional branch.  Return false,
   otherwise.  */

static bool
tree_block_ends_with_condjump_p (basic_block bb)
{
  tree stmt = tsi_stmt (bsi_last (bb).tsi);
  return (TREE_CODE (stmt) == COND_EXPR);
}

/* Return true if we need to add a fake edge to the exit block at
   statement T.  Helper function for tree_flow_call_edges_add.  */

static bool
need_fake_edge_p (tree t)
{
  tree call;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST, PURE and ALWAYS_RETURN calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  call = get_call_expr_in (t);
  if (call
      && !(call_expr_flags (call) & (ECF_NORETURN | ECF_ALWAYS_RETURN)))
    return true;

  if (TREE_CODE (t) == ASM_EXPR
      && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
    return true;

  return false;
}
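
/* Illustrative examples (not from the original sources) of how the
   predicate above classifies statements:

     tmp = foo (x);          -- plain call, may not return: needs a fake edge
     abort ();               -- ECF_NORETURN call: already has an edge to exit
     __asm__ volatile ("");  -- volatile asm: needs a fake edge
     c = a + b;              -- ordinary statement: no fake edge needed  */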

/* Add fake edges to the function exit for any non-constant and
   non-noreturn calls, and for volatile inline assembly, in the bitmap of
   blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.
   Return the number of blocks that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
tree_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == 0)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      block_stmt_iterator bsi = bsi_last (bb);
      tree t = NULL_TREE;

      if (!bsi_end_p (bsi))
        t = bsi_stmt (bsi);

      if (t && need_fake_edge_p (t))
        {
          edge e;

          e = find_edge (bb, EXIT_BLOCK_PTR);
          if (e)
            {
              bsi_insert_on_edge (e, build_empty_stmt ());
              bsi_commit_edge_inserts ();
            }
        }
    }

  /* Now add fake edges to the function exit for any non-constant
     calls, since there is no way that we can determine if they will
     return or not.  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      block_stmt_iterator bsi;
      tree stmt, last_stmt;

      if (!bb)
        continue;

      if (blocks && !TEST_BIT (blocks, i))
        continue;

      bsi = bsi_last (bb);
      if (!bsi_end_p (bsi))
        {
          last_stmt = bsi_stmt (bsi);
          do
            {
              stmt = bsi_stmt (bsi);
              if (need_fake_edge_p (stmt))
                {
                  edge e;

                  /* The handling above of the final block before the
                     epilogue should be enough to verify that there is
                     no edge to the exit block in the CFG already.
                     Calling make_edge in such a case would cause us to
                     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
                  if (stmt == last_stmt)
                    {
                      e = find_edge (bb, EXIT_BLOCK_PTR);
                      gcc_assert (e == NULL);
                    }
#endif

                  /* Note that the following may create a new basic block
                     and renumber the existing basic blocks.  */
                  if (stmt != last_stmt)
                    {
                      e = split_block (bb, stmt);
                      if (e)
                        blocks_split++;
                    }
                  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
                }
              bsi_prev (&bsi);
            }
          while (!bsi_end_p (bsi));
        }
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
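
/* A minimal sketch of the intended effect (illustrative, not from the
   original sources).  For a function whose only block is

       bb_0:  foo ();  bar ();  return;

   a call to tree_flow_call_edges_add (NULL) splits the block after each
   call and adds an EDGE_FAKE edge from each resulting call block to
   EXIT_BLOCK_PTR, so the profiler no longer assumes that entering the
   block implies reaching the trailing return.  */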
/* Purge the EDGE_EH successor edges of BB if its last statement can no
   longer throw.  Return true if any edges were removed.  */

bool
tree_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  tree stmt = last_stmt (bb);

  if (stmt && tree_can_throw_internal (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
        {
          remove_edge (e);
          changed = true;
        }
      else
        ei_next (&ei);
    }

  /* Removal of dead EH edges might change dominators of not
     just immediate successors.  E.g. when bb1 is changed so that
     it no longer can throw and bb1->bb3 and bb1->bb4 are dead
     eh edges purged by this function in:
           0
          / \
         v   v
         1-->2
        / \  |
       v   v |
       3-->4 |
        \    v
         --->5
             |
             -
     idom(bb5) must be recomputed.  For now just free the dominance
     info.  */
  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  return changed;
}


/* Purge dead EH edges from every basic block listed in BLOCKS.  Return
   true if any edges were removed.  */

bool
tree_purge_all_dead_eh_edges (bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
    }

  return changed;
}

/* This function is called whenever a new edge is created or
   redirected.  */

static void
tree_execute_on_growing_pred (edge e)
{
  basic_block bb = e->dest;

  if (phi_nodes (bb))
    reserve_phi_args_for_new_edge (bb);
}


/* This function is called immediately before edge E is removed from
   the edge vector E->dest->preds.  */

static void
tree_execute_on_shrinking_pred (edge e)
{
  if (phi_nodes (e->dest))
    remove_phi_args (e);
}

struct cfg_hooks tree_cfg_hooks = {
  "tree",
  tree_verify_flow_info,
  tree_dump_bb,                         /* dump_bb  */
  create_bb,                            /* create_basic_block  */
  tree_redirect_edge_and_branch,        /* redirect_edge_and_branch  */
  tree_redirect_edge_and_branch_force,  /* redirect_edge_and_branch_force  */
  remove_bb,                            /* delete_basic_block  */
  tree_split_block,                     /* split_block  */
  tree_move_block_after,                /* move_block_after  */
  tree_can_merge_blocks_p,              /* can_merge_blocks_p  */
  tree_merge_blocks,                    /* merge_blocks  */
  tree_predict_edge,                    /* predict_edge  */
  tree_predicted_by_p,                  /* predicted_by_p  */
  tree_can_duplicate_bb_p,              /* can_duplicate_block_p  */
  tree_duplicate_bb,                    /* duplicate_block  */
  tree_split_edge,                      /* split_edge  */
  tree_make_forwarder_block,            /* make_forward_block  */
  NULL,                                 /* tidy_fallthru_edge  */
  tree_block_ends_with_call_p,          /* block_ends_with_call_p  */
  tree_block_ends_with_condjump_p,      /* block_ends_with_condjump_p  */
  tree_flow_call_edges_add,             /* flow_call_edges_add  */
  tree_execute_on_growing_pred,         /* execute_on_growing_pred  */
  tree_execute_on_shrinking_pred        /* execute_on_shrinking_pred  */
};
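
/* The table above is not called directly.  Generic CFG code in cfghooks.c
   dispatches through whichever hook table is currently registered.  A
   hedged sketch of the usual pattern, assuming an edge E is in scope in
   the caller (tree_register_cfg_hooks and the generic split_edge wrapper
   come from cfghooks.c/cfghooks.h):  */
#if 0
  basic_block new_bb;

  tree_register_cfg_hooks ();   /* Make tree_cfg_hooks the active table.  */
  new_bb = split_edge (e);      /* Dispatches to tree_split_edge above.  */
#endif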

/* Split all critical edges.  */

static void
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
          {
            split_edge (e);
          }
    }
  end_recording_case_labels ();
}

struct tree_opt_pass pass_split_crit_edges =
{
  "crited",                             /* name */
  NULL,                                 /* gate */
  split_critical_edges,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SPLIT_EDGES,                  /* tv_id */
  PROP_cfg,                             /* properties_required */
  PROP_no_crit_edges,                   /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
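
/* Illustrative note (not part of the original file): EDGE_CRITICAL_P from
   basic-block.h considers an edge critical when its source has two or more
   successors and its destination has two or more predecessors.  Such an
   edge cannot hold inserted code without duplicating it, which is why the
   pass above splits them.  The predicate, spelled out as a sketch:  */
#if 0
static bool
edge_is_critical_example (edge e)
{
  return EDGE_COUNT (e->src->succs) >= 2
         && EDGE_COUNT (e->dest->preds) >= 2;
}
#endif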

/* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
   a temporary, make sure it is registered for renaming if necessary,
   and finally return the temporary.  Put the statements to compute
   EXP before the current statement in BSI.  */

tree
gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
{
  tree t, new_stmt, orig_stmt;

  if (is_gimple_val (exp))
    return exp;

  t = make_rename_temp (type, NULL);
  new_stmt = build (MODIFY_EXPR, type, t, exp);

  orig_stmt = bsi_stmt (*bsi);
  SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
  TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);

  bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);

  return t;
}

/* Build a ternary operation and gimplify it.  Emit code before BSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
                 tree type, tree a, tree b, tree c)
{
  tree ret;

  ret = fold (build3 (code, type, a, b, c));
  STRIP_NOPS (ret);

  return gimplify_val (bsi, type, ret);
}


/* Build a binary operation and gimplify it.  Emit code before BSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
                 tree type, tree a, tree b)
{
  tree ret;

  ret = fold (build2 (code, type, a, b));
  STRIP_NOPS (ret);

  return gimplify_val (bsi, type, ret);
}


/* Build a unary operation and gimplify it.  Emit code before BSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
                 tree a)
{
  tree ret;

  ret = fold (build1 (code, type, a));
  STRIP_NOPS (ret);

  return gimplify_val (bsi, type, ret);
}
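
/* A minimal usage sketch (assumed caller context, not from this file):
   given a block_stmt_iterator BSI positioned on the statement that needs
   the value, and two GIMPLE operands A and B, a pass could materialize
   their sum in a fresh temporary with:  */
#if 0
  tree sum = gimplify_build2 (&bsi, PLUS_EXPR, TREE_TYPE (a), a, b);
#endif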

/* Emit return warnings.  */

static void
execute_warn_function_return (void)
{
#ifdef USE_MAPPED_LOCATION
  source_location location;
#else
  location_t *locus;
#endif
  tree last;
  edge e;
  edge_iterator ei;

  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
      && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
    warning ("%Jfunction might be possible candidate for "
             "attribute %<noreturn%>",
             cfun->decl);

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
#ifdef USE_MAPPED_LOCATION
      location = UNKNOWN_LOCATION;
#else
      locus = NULL;
#endif
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          last = last_stmt (e->src);
          if (TREE_CODE (last) == RETURN_EXPR
#ifdef USE_MAPPED_LOCATION
              && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
#else
              && (locus = EXPR_LOCUS (last)) != NULL)
#endif
            break;
        }
#ifdef USE_MAPPED_LOCATION
      if (location == UNKNOWN_LOCATION)
        location = cfun->function_end_locus;
      warning ("%H%<noreturn%> function does return", &location);
#else
      if (!locus)
        locus = &cfun->function_end_locus;
      warning ("%H%<noreturn%> function does return", locus);
#endif
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
           && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          tree last = last_stmt (e->src);
          if (TREE_CODE (last) == RETURN_EXPR
              && TREE_OPERAND (last, 0) == NULL)
            {
#ifdef USE_MAPPED_LOCATION
              location = EXPR_LOCATION (last);
              if (location == UNKNOWN_LOCATION)
                location = cfun->function_end_locus;
              warning ("%Hcontrol reaches end of non-void function",
                       &location);
#else
              locus = EXPR_LOCUS (last);
              if (!locus)
                locus = &cfun->function_end_locus;
              warning ("%Hcontrol reaches end of non-void function", locus);
#endif
              break;
            }
        }
    }
}
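
/* Illustrative source fragments (not from this file) showing what the
   warnings above react to:

     void f (int x) __attribute__ ((noreturn));
     void f (int x) { if (x) abort (); }
        -- when X is zero, control reaches the end of f, so the CFG has an
           edge to EXIT and "'noreturn' function does return" is emitted.

     int g (int x) { if (x) return 1; }
        -- falling off the end reaches EXIT through the implicit empty
           return, so "control reaches end of non-void function" is
           emitted.  */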

/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
                                     edge *true_edge,
                                     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
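
/* A short usage sketch (assumed caller context, not from this file): for a
   block BB known to end in a COND_EXPR, the branch targets can be
   recovered as follows.  */
#if 0
  edge true_edge, false_edge;

  gcc_assert (TREE_CODE (last_stmt (bb)) == COND_EXPR);
  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  /* true_edge->dest is reached when the predicate is true,
     false_edge->dest when it is false.  */
#endif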

struct tree_opt_pass pass_warn_function_return =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_warn_function_return,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};

#include "gt-tree-cfg.h"