gcc/tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "errors.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
52 /* Local declarations. */
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
57 /* Mapping of labels to their associated blocks. This can greatly speed up
58 building of the CFG in code with lots of gotos. */
59 static GTY(()) varray_type label_to_block_map;
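/* An illustrative sketch, not part of the original file: the map is
   indexed by LABEL_DECL_UID, so resolving a goto target is a single
   array lookup rather than a scan over every block, roughly

       int uid = LABEL_DECL_UID (label);
       basic_block bb = VARRAY_BB (label_to_block_map, uid);

   label_to_block below is the real accessor; it also copes with
   undefined labels (negative UID) after parse errors.  */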
61 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
62 which use a particular edge. The CASE_LABEL_EXPRs are chained together
63 via their TREE_CHAIN field, which we clear after we're done with the
64 hash table to prevent problems with duplication of SWITCH_EXPRs.
66 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
67 update the case vector in response to edge redirections.
69 Right now this table is set up and torn down at key points in the
70 compilation process. It would be nice if we could make the table
71 more persistent. The key is getting notification of changes to
72 the CFG (particularly edge removal, creation and redirection). */
74 struct edge_to_cases_elt
76 /* The edge itself. Necessary for hashing and equality tests. */
77 edge e;
79 /* The case labels associated with this edge. We link these up via
80 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
81 when we destroy the hash table. This prevents problems when copying
82 SWITCH_EXPRs. */
83 tree case_labels;
86 static htab_t edge_to_cases;
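/* An illustrative sketch, not part of the original file: a consumer of
   this table (for example the SWITCH_EXPR case of edge redirection)
   can retarget every case label that currently uses edge E roughly as
   below.  NEW_LABEL is an assumed name for the label of the new
   destination block.  */
#if 0
static void
example_retarget_cases (edge e, tree switch_stmt, tree new_label)
{
  tree cases;

  /* The CASE_LABEL_EXPRs recorded for E are chained via TREE_CHAIN
     while the recording hash table is live.  */
  for (cases = get_cases_for_edge (e, switch_stmt); cases;
       cases = TREE_CHAIN (cases))
    CASE_LABEL (cases) = new_label;
}
#endif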
88 /* CFG statistics. */
89 struct cfg_stats_d
91 long num_merged_labels;
94 static struct cfg_stats_d cfg_stats;
96 /* Nonzero if we found a computed goto while building basic blocks. */
97 static bool found_computed_goto;
99 /* Basic blocks and flowgraphs. */
100 static basic_block create_bb (void *, void *, basic_block);
101 static void create_block_annotation (basic_block);
102 static void free_blocks_annotations (void);
103 static void clear_blocks_annotations (void);
104 static void make_blocks (tree);
105 static void factor_computed_gotos (void);
107 /* Edges. */
108 static void make_edges (void);
109 static void make_ctrl_stmt_edges (basic_block);
110 static void make_exit_edges (basic_block);
111 static void make_cond_expr_edges (basic_block);
112 static void make_switch_expr_edges (basic_block);
113 static void make_goto_expr_edges (basic_block);
114 static edge tree_redirect_edge_and_branch (edge, basic_block);
115 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
116 static void split_critical_edges (void);
117 static bool remove_fallthru_edge (VEC(edge) *);
119 /* Various helpers. */
120 static inline bool stmt_starts_bb_p (tree, tree);
121 static int tree_verify_flow_info (void);
122 static void tree_make_forwarder_block (edge);
123 static bool tree_forwarder_block_p (basic_block, bool);
124 static void tree_cfg2vcg (FILE *);
126 /* Flowgraph optimization and cleanup. */
127 static void tree_merge_blocks (basic_block, basic_block);
128 static bool tree_can_merge_blocks_p (basic_block, basic_block);
129 static void remove_bb (basic_block);
130 static bool cleanup_control_flow (void);
131 static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
132 static edge find_taken_edge_computed_goto (basic_block, tree);
133 static edge find_taken_edge_cond_expr (basic_block, tree);
134 static edge find_taken_edge_switch_expr (basic_block, tree);
135 static tree find_case_label_for_value (tree, tree);
136 static bool phi_alternatives_equal (basic_block, edge, edge);
137 static bool cleanup_forwarder_blocks (void);
140 /*---------------------------------------------------------------------------
141 Create basic blocks
142 ---------------------------------------------------------------------------*/
144 /* Entry point to the CFG builder for trees. TP points to the list of
145 statements to be added to the flowgraph. */
147 static void
148 build_tree_cfg (tree *tp)
150 /* Register specific tree functions. */
151 tree_register_cfg_hooks ();
153 /* Initialize rbi_pool. */
154 alloc_rbi_pool ();
156 /* Initialize the basic block array. */
157 init_flow ();
158 profile_status = PROFILE_ABSENT;
159 n_basic_blocks = 0;
160 last_basic_block = 0;
161 VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
162 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
164 /* Build a mapping of labels to their associated blocks. */
165 VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
166 "label to block map");
168 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
169 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
171 found_computed_goto = 0;
172 make_blocks (*tp);
174 /* Computed gotos are hell to deal with, especially if there are
175 lots of them with a large number of destinations. So we factor
176 them to a common computed goto location before we build the
177 edge list. After we convert back to normal form, we will un-factor
178 the computed gotos since factoring introduces an unwanted jump. */
179 if (found_computed_goto)
180 factor_computed_gotos ();
182 /* Make sure there is always at least one block, even if it's empty. */
183 if (n_basic_blocks == 0)
184 create_empty_bb (ENTRY_BLOCK_PTR);
186 create_block_annotation (ENTRY_BLOCK_PTR);
187 create_block_annotation (EXIT_BLOCK_PTR);
189 /* Adjust the size of the array. */
190 VARRAY_GROW (basic_block_info, n_basic_blocks);
192 /* To speed up statement iterator walks, we first purge dead labels. */
193 cleanup_dead_labels ();
195 /* Group case nodes to reduce the number of edges.
196 We do this after cleaning up dead labels because otherwise we miss
197 a lot of obvious case merging opportunities. */
198 group_case_labels ();
200 /* Create the edges of the flowgraph. */
201 make_edges ();
203 /* Debugging dumps. */
205 /* Write the flowgraph to a VCG file. */
207 int local_dump_flags;
208 FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
209 if (dump_file)
211 tree_cfg2vcg (dump_file);
212 dump_end (TDI_vcg, dump_file);
216 /* Dump a textual representation of the flowgraph. */
217 if (dump_file)
218 dump_tree_cfg (dump_file, dump_flags);
221 static void
222 execute_build_cfg (void)
224 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
227 struct tree_opt_pass pass_build_cfg =
229 "cfg", /* name */
230 NULL, /* gate */
231 execute_build_cfg, /* execute */
232 NULL, /* sub */
233 NULL, /* next */
234 0, /* static_pass_number */
235 TV_TREE_CFG, /* tv_id */
236 PROP_gimple_leh, /* properties_required */
237 PROP_cfg, /* properties_provided */
238 0, /* properties_destroyed */
239 0, /* todo_flags_start */
240 TODO_verify_stmts, /* todo_flags_finish */
241 0 /* letter */
244 /* Search the CFG for any computed gotos. If found, factor them to a
245 common computed goto site. Also record the location of that site so
246 that we can un-factor the gotos after we have converted back to
247 normal form. */
249 static void
250 factor_computed_gotos (void)
252 basic_block bb;
253 tree factored_label_decl = NULL;
254 tree var = NULL;
255 tree factored_computed_goto_label = NULL;
256 tree factored_computed_goto = NULL;
258 /* We know there are one or more computed gotos in this function.
259 Examine the last statement in each basic block to see if the block
260 ends with a computed goto. */
262 FOR_EACH_BB (bb)
264 block_stmt_iterator bsi = bsi_last (bb);
265 tree last;
267 if (bsi_end_p (bsi))
268 continue;
269 last = bsi_stmt (bsi);
271 /* Ignore the computed goto we create when we factor the original
272 computed gotos. */
273 if (last == factored_computed_goto)
274 continue;
276 /* If the last statement is a computed goto, factor it. */
277 if (computed_goto_p (last))
279 tree assignment;
281 /* The first time we find a computed goto we need to create
282 the factored goto block and the variable each original
283 computed goto will use for their goto destination. */
284 if (! factored_computed_goto)
286 basic_block new_bb = create_empty_bb (bb);
287 block_stmt_iterator new_bsi = bsi_start (new_bb);
289 /* Create the destination of the factored goto. Each original
290 computed goto will put its desired destination into this
291 variable and jump to the label we create immediately
292 below. */
293 var = create_tmp_var (ptr_type_node, "gotovar");
295 /* Build a label for the new block which will contain the
296 factored computed goto. */
297 factored_label_decl = create_artificial_label ();
298 factored_computed_goto_label
299 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
300 bsi_insert_after (&new_bsi, factored_computed_goto_label,
301 BSI_NEW_STMT);
303 /* Build our new computed goto. */
304 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
305 bsi_insert_after (&new_bsi, factored_computed_goto,
306 BSI_NEW_STMT);
309 /* Copy the original computed goto's destination into VAR. */
310 assignment = build (MODIFY_EXPR, ptr_type_node,
311 var, GOTO_DESTINATION (last));
312 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
314 /* And re-vector the computed goto to the new destination. */
315 GOTO_DESTINATION (last) = factored_label_decl;
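/* An illustrative example, not from the original file: with two
   computed gotos "goto *p_1;" and "goto *p_2;", the code above
   rewrites each of them as

       gotovar = p_i;
       goto <factored_label>;

   and appends a single new block

       <factored_label>:
       goto *gotovar;

   so only one block needs abnormal edges to every potential target
   label, instead of one such fan-out per original computed goto.  */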
321 /* Create annotations for a single basic block. */
323 static void
324 create_block_annotation (basic_block bb)
326 /* Verify that the tree_annotations field is clear. */
327 gcc_assert (!bb->tree_annotations);
328 bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
332 /* Free the annotations for all the basic blocks. */
334 static void free_blocks_annotations (void)
336 clear_blocks_annotations ();
340 /* Clear the annotations for all the basic blocks. */
342 static void
343 clear_blocks_annotations (void)
345 basic_block bb;
347 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
348 bb->tree_annotations = NULL;
352 /* Build a flowgraph for the statement_list STMT_LIST. */
354 static void
355 make_blocks (tree stmt_list)
357 tree_stmt_iterator i = tsi_start (stmt_list);
358 tree stmt = NULL;
359 bool start_new_block = true;
360 bool first_stmt_of_list = true;
361 basic_block bb = ENTRY_BLOCK_PTR;
363 while (!tsi_end_p (i))
365 tree prev_stmt;
367 prev_stmt = stmt;
368 stmt = tsi_stmt (i);
370 /* If the statement starts a new basic block or if we have determined
371 in a previous pass that we need to create a new block for STMT, do
372 so now. */
373 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
375 if (!first_stmt_of_list)
376 stmt_list = tsi_split_statement_list_before (&i);
377 bb = create_basic_block (stmt_list, NULL, bb);
378 start_new_block = false;
381 /* Now add STMT to BB and create the subgraphs for special statement
382 codes. */
383 set_bb_for_stmt (stmt, bb);
385 if (computed_goto_p (stmt))
386 found_computed_goto = true;
388 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
389 next iteration. */
390 if (stmt_ends_bb_p (stmt))
391 start_new_block = true;
393 tsi_next (&i);
394 first_stmt_of_list = false;
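/* An illustrative example, not from the original file: given the
   GIMPLE sequence

       a = b + c;
       if (a) goto L1; else goto L2;   <-- ends a block (stmt_ends_bb_p)
       L1:                             <-- starts a block (stmt_starts_bb_p)
       x = 1;
       L2:                             <-- starts another block
       return x;

   make_blocks splits the statement list at each boundary, producing
   three basic blocks.  */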
399 /* Create and return a new empty basic block after bb AFTER. */
401 static basic_block
402 create_bb (void *h, void *e, basic_block after)
404 basic_block bb;
406 gcc_assert (!e);
408 /* Create and initialize a new basic block. Since alloc_block uses
409 ggc_alloc_cleared to allocate a basic block, we do not have to
410 clear the newly allocated basic block here. */
411 bb = alloc_block ();
413 bb->index = last_basic_block;
414 bb->flags = BB_NEW;
415 bb->stmt_list = h ? h : alloc_stmt_list ();
417 /* Add the new block to the linked list of blocks. */
418 link_block (bb, after);
420 /* Grow the basic block array if needed. */
421 if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
423 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
424 VARRAY_GROW (basic_block_info, new_size);
427 /* Add the newly created block to the array. */
428 BASIC_BLOCK (last_basic_block) = bb;
430 create_block_annotation (bb);
432 n_basic_blocks++;
433 last_basic_block++;
435 initialize_bb_rbi (bb);
436 return bb;
440 /*---------------------------------------------------------------------------
441 Edge creation
442 ---------------------------------------------------------------------------*/
444 /* Fold COND_EXPR_COND of each COND_EXPR. */
446 static void
447 fold_cond_expr_cond (void)
449 basic_block bb;
451 FOR_EACH_BB (bb)
453 tree stmt = last_stmt (bb);
455 if (stmt
456 && TREE_CODE (stmt) == COND_EXPR)
458 tree cond = fold (COND_EXPR_COND (stmt));
459 if (integer_zerop (cond))
460 COND_EXPR_COND (stmt) = boolean_false_node;
461 else if (integer_onep (cond))
462 COND_EXPR_COND (stmt) = boolean_true_node;
467 /* Join all the blocks in the flowgraph. */
469 static void
470 make_edges (void)
472 basic_block bb;
474 /* Create an edge from entry to the first block with executable
475 statements in it. */
476 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
478 /* Traverse the basic block array placing edges. */
479 FOR_EACH_BB (bb)
481 tree first = first_stmt (bb);
482 tree last = last_stmt (bb);
484 if (first)
486 /* Edges for statements that always alter flow control. */
487 if (is_ctrl_stmt (last))
488 make_ctrl_stmt_edges (bb);
490 /* Edges for statements that sometimes alter flow control. */
491 if (is_ctrl_altering_stmt (last))
492 make_exit_edges (bb);
495 /* Finally, if no edges were created above, this is a regular
496 basic block that only needs a fallthru edge. */
497 if (EDGE_COUNT (bb->succs) == 0)
498 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
501 /* We do not care about fake edges, so remove any that the CFG
502 builder inserted for completeness. */
503 remove_fake_exit_edges ();
505 /* Fold COND_EXPR_COND of each COND_EXPR. */
506 fold_cond_expr_cond ();
508 /* Clean up the graph and warn for unreachable code. */
509 cleanup_tree_cfg ();
513 /* Create edges for control statement at basic block BB. */
515 static void
516 make_ctrl_stmt_edges (basic_block bb)
518 tree last = last_stmt (bb);
520 gcc_assert (last);
521 switch (TREE_CODE (last))
523 case GOTO_EXPR:
524 make_goto_expr_edges (bb);
525 break;
527 case RETURN_EXPR:
528 make_edge (bb, EXIT_BLOCK_PTR, 0);
529 break;
531 case COND_EXPR:
532 make_cond_expr_edges (bb);
533 break;
535 case SWITCH_EXPR:
536 make_switch_expr_edges (bb);
537 break;
539 case RESX_EXPR:
540 make_eh_edges (last);
541 /* Yet another NORETURN hack. */
542 if (EDGE_COUNT (bb->succs) == 0)
543 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
544 break;
546 default:
547 gcc_unreachable ();
552 /* Create exit edges for statements in block BB that alter the flow of
553 control. Statements that alter the control flow are 'goto', 'return'
554 and calls to non-returning functions. */
556 static void
557 make_exit_edges (basic_block bb)
559 tree last = last_stmt (bb), op;
561 gcc_assert (last);
562 switch (TREE_CODE (last))
564 case CALL_EXPR:
565 /* If this function receives a nonlocal goto, then we need to
566 make edges from this call site to all the nonlocal goto
567 handlers. */
568 if (TREE_SIDE_EFFECTS (last)
569 && current_function_has_nonlocal_label)
570 make_goto_expr_edges (bb);
572 /* If this statement has reachable exception handlers, then
573 create abnormal edges to them. */
574 make_eh_edges (last);
576 /* Some calls are known not to return. For such calls we create
577 a fake edge.
579 We really need to revamp how we build edges so that it's not
580 such a bloody pain to avoid creating edges for this case since
581 all we do is remove these edges when we're done building the
582 CFG. */
583 if (call_expr_flags (last) & ECF_NORETURN)
585 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
586 return;
589 /* Don't forget the fall-thru edge. */
590 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
591 break;
593 case MODIFY_EXPR:
594 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
595 may have an abnormal edge. Search the RHS for this case and
596 create any required edges. */
597 op = get_call_expr_in (last);
598 if (op && TREE_SIDE_EFFECTS (op)
599 && current_function_has_nonlocal_label)
600 make_goto_expr_edges (bb);
602 make_eh_edges (last);
603 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
604 break;
606 default:
607 gcc_unreachable ();
612 /* Create the edges for a COND_EXPR starting at block BB.
613 At this point, both clauses must contain only simple gotos. */
615 static void
616 make_cond_expr_edges (basic_block bb)
618 tree entry = last_stmt (bb);
619 basic_block then_bb, else_bb;
620 tree then_label, else_label;
622 gcc_assert (entry);
623 gcc_assert (TREE_CODE (entry) == COND_EXPR);
625 /* Entry basic blocks for each component. */
626 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
627 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
628 then_bb = label_to_block (then_label);
629 else_bb = label_to_block (else_label);
631 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
632 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
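/* An illustrative example, not from the original file: for a lowered
   conditional

       if (a_1) goto L_then; else goto L_else;

   the two calls above create an EDGE_TRUE_VALUE edge to the block
   starting at L_then and an EDGE_FALSE_VALUE edge to the block
   starting at L_else.  L_then and L_else are assumed label names.  */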
635 /* Hashing routine for EDGE_TO_CASES. */
637 static hashval_t
638 edge_to_cases_hash (const void *p)
640 edge e = ((struct edge_to_cases_elt *)p)->e;
642 /* Hash on the edge itself (which is a pointer). */
643 return htab_hash_pointer (e);
646 /* Equality routine for EDGE_TO_CASES.  Edges are unique, so testing
647 for equality is just a pointer comparison. */
649 static int
650 edge_to_cases_eq (const void *p1, const void *p2)
652 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
653 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
655 return e1 == e2;
658 /* Called for each element in the hash table (P) as we delete the
659 edge to cases hash table.
661 Clear all the TREE_CHAINs to prevent problems with copying of
662 SWITCH_EXPRs and structure sharing rules, then free the hash table
663 element. */
665 static void
666 edge_to_cases_cleanup (void *p)
668 struct edge_to_cases_elt *elt = p;
669 tree t, next;
671 for (t = elt->case_labels; t; t = next)
673 next = TREE_CHAIN (t);
674 TREE_CHAIN (t) = NULL;
676 free (p);
679 /* Start recording information mapping edges to case labels. */
681 static void
682 start_recording_case_labels (void)
684 gcc_assert (edge_to_cases == NULL);
686 edge_to_cases = htab_create (37,
687 edge_to_cases_hash,
688 edge_to_cases_eq,
689 edge_to_cases_cleanup);
692 /* Return nonzero if we are recording information for case labels. */
694 static bool
695 recording_case_labels_p (void)
697 return (edge_to_cases != NULL);
700 /* Stop recording information mapping edges to case labels and
701 remove any information we have recorded. */
702 static void
703 end_recording_case_labels (void)
705 htab_delete (edge_to_cases);
706 edge_to_cases = NULL;
709 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
711 static void
712 record_switch_edge (edge e, tree case_label)
714 struct edge_to_cases_elt *elt;
715 void **slot;
717 /* Build a hash table element so we can see if E is already
718 in the table. */
719 elt = xmalloc (sizeof (struct edge_to_cases_elt));
720 elt->e = e;
721 elt->case_labels = case_label;
723 slot = htab_find_slot (edge_to_cases, elt, INSERT);
725 if (*slot == NULL)
727 /* E was not in the hash table. Install E into the hash table. */
728 *slot = (void *)elt;
730 else
732 /* E was already in the hash table. Free ELT as we do not need it
733 anymore. */
734 free (elt);
736 /* Get the entry stored in the hash table. */
737 elt = (struct edge_to_cases_elt *) *slot;
739 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
740 TREE_CHAIN (case_label) = elt->case_labels;
741 elt->case_labels = case_label;
745 /* If we are inside a {start,end}_recording_cases block, then return
746 a chain of CASE_LABEL_EXPRs from T which reference E.
748 Otherwise return NULL. */
750 static tree
751 get_cases_for_edge (edge e, tree t)
753 struct edge_to_cases_elt elt, *elt_p;
754 void **slot;
755 size_t i, n;
756 tree vec;
758 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
759 chains available. Return NULL so the caller can detect this case. */
760 if (!recording_case_labels_p ())
761 return NULL;
763 restart:
764 elt.e = e;
765 elt.case_labels = NULL;
766 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
768 if (slot)
770 elt_p = (struct edge_to_cases_elt *)*slot;
771 return elt_p->case_labels;
774 /* If we did not find E in the hash table, then this must be the first
775 time we have been queried for information about E & T. Add all the
776 elements from T to the hash table then perform the query again. */
778 vec = SWITCH_LABELS (t);
779 n = TREE_VEC_LENGTH (vec);
780 for (i = 0; i < n; i++)
782 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
783 basic_block label_bb = label_to_block (lab);
784 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
786 goto restart;
789 /* Create the edges for a SWITCH_EXPR starting at block BB.
790 At this point, the switch body has been lowered and the
791 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
793 static void
794 make_switch_expr_edges (basic_block bb)
796 tree entry = last_stmt (bb);
797 size_t i, n;
798 tree vec;
800 vec = SWITCH_LABELS (entry);
801 n = TREE_VEC_LENGTH (vec);
803 for (i = 0; i < n; ++i)
805 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
806 basic_block label_bb = label_to_block (lab);
807 make_edge (bb, label_bb, 0);
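/* An illustrative example, not from the original file: for a lowered
   switch whose SWITCH_LABELS vector holds

       case 0: goto L0;   case 1 ... 3: goto L1;   default: goto Ld;

   the loop above creates one flag-less edge (make_edge with 0) from BB
   to each of the blocks containing L0, L1 and Ld.  */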
812 /* Return the basic block holding label DEST. */
814 basic_block
815 label_to_block (tree dest)
817 int uid = LABEL_DECL_UID (dest);
819 /* We would die hard when faced with an undefined label. Emit a label to
820 the very first basic block. This will hopefully make even the dataflow
821 and undefined-variable warnings come out right. */
822 if ((errorcount || sorrycount) && uid < 0)
824 block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
825 tree stmt;
827 stmt = build1 (LABEL_EXPR, void_type_node, dest);
828 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
829 uid = LABEL_DECL_UID (dest);
831 return VARRAY_BB (label_to_block_map, uid);
835 /* Create edges for a goto statement at block BB. */
837 static void
838 make_goto_expr_edges (basic_block bb)
840 tree goto_t;
841 basic_block target_bb;
842 int for_call;
843 block_stmt_iterator last = bsi_last (bb);
845 goto_t = bsi_stmt (last);
847 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
848 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
849 from a nonlocal goto. */
850 if (TREE_CODE (goto_t) != GOTO_EXPR)
851 for_call = 1;
852 else
854 tree dest = GOTO_DESTINATION (goto_t);
855 for_call = 0;
857 /* A GOTO to a local label creates normal edges. */
858 if (simple_goto_p (goto_t))
860 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
861 #ifdef USE_MAPPED_LOCATION
862 e->goto_locus = EXPR_LOCATION (goto_t);
863 #else
864 e->goto_locus = EXPR_LOCUS (goto_t);
865 #endif
866 bsi_remove (&last);
867 return;
870 /* Nothing more to do for nonlocal gotos. */
871 if (TREE_CODE (dest) == LABEL_DECL)
872 return;
874 /* Computed gotos remain. */
877 /* Look for the block starting with the destination label. In the
878 case of a computed goto, make an edge to any label block we find
879 in the CFG. */
880 FOR_EACH_BB (target_bb)
882 block_stmt_iterator bsi;
884 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
886 tree target = bsi_stmt (bsi);
888 if (TREE_CODE (target) != LABEL_EXPR)
889 break;
891 if (
892 /* Computed GOTOs. Make an edge to every label block that has
893 been marked as a potential target for a computed goto. */
894 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
895 /* Nonlocal GOTO target. Make an edge to every label block
896 that has been marked as a potential target for a nonlocal
897 goto. */
898 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
900 make_edge (bb, target_bb, EDGE_ABNORMAL);
901 break;
906 /* Degenerate case of computed goto with no labels. */
907 if (!for_call && EDGE_COUNT (bb->succs) == 0)
908 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
912 /*---------------------------------------------------------------------------
913 Flowgraph analysis
914 ---------------------------------------------------------------------------*/
916 /* Remove unreachable blocks and other miscellaneous clean up work. */
918 bool
919 cleanup_tree_cfg (void)
921 bool retval = false;
923 timevar_push (TV_TREE_CLEANUP_CFG);
925 retval = cleanup_control_flow ();
926 retval |= delete_unreachable_blocks ();
928 /* cleanup_forwarder_blocks can redirect edges out of SWITCH_EXPRs,
929 which can get expensive. So we want to enable recording of edge
930 to CASE_LABEL_EXPR mappings around the call to
931 cleanup_forwarder_blocks. */
932 start_recording_case_labels ();
933 retval |= cleanup_forwarder_blocks ();
934 end_recording_case_labels ();
936 #ifdef ENABLE_CHECKING
937 if (retval)
939 gcc_assert (!cleanup_control_flow ());
940 gcc_assert (!delete_unreachable_blocks ());
941 gcc_assert (!cleanup_forwarder_blocks ());
943 #endif
945 /* Merging the blocks creates no new opportunities for the other
946 optimizations, so do it here. */
947 retval |= merge_seq_blocks ();
949 compact_blocks ();
951 #ifdef ENABLE_CHECKING
952 verify_flow_info ();
953 #endif
954 timevar_pop (TV_TREE_CLEANUP_CFG);
955 return retval;
959 /* Cleanup cfg and repair loop structures. */
961 void
962 cleanup_tree_cfg_loop (void)
964 bitmap changed_bbs = BITMAP_ALLOC (NULL);
966 cleanup_tree_cfg ();
968 fix_loop_structure (current_loops, changed_bbs);
969 calculate_dominance_info (CDI_DOMINATORS);
971 /* This usually does nothing. But sometimes parts of cfg that originally
972 were inside a loop get out of it due to edge removal (since they
973 become unreachable by back edges from latch). */
974 rewrite_into_loop_closed_ssa (changed_bbs);
976 BITMAP_FREE (changed_bbs);
978 #ifdef ENABLE_CHECKING
979 verify_loop_structure (current_loops);
980 #endif
983 /* Cleanup useless labels in basic blocks. This is something we wish
984 to do early because it allows us to group case labels before creating
985 the edges for the CFG, and it speeds up block statement iterators in
986 all passes later on.
987 We only run this pass once; running it more than once is probably not
988 profitable. */
990 /* A map from basic block index to the leading label of that block. */
991 static tree *label_for_bb;
993 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
994 static void
995 update_eh_label (struct eh_region *region)
997 tree old_label = get_eh_region_tree_label (region);
998 if (old_label)
1000 tree new_label;
1001 basic_block bb = label_to_block (old_label);
1003 /* ??? After optimizing, there may be EH regions with labels
1004 that have already been removed from the function body, so
1005 there is no basic block for them. */
1006 if (! bb)
1007 return;
1009 new_label = label_for_bb[bb->index];
1010 set_eh_region_tree_label (region, new_label);
1014 /* Given LABEL return the first label in the same basic block. */
1015 static tree
1016 main_block_label (tree label)
1018 basic_block bb = label_to_block (label);
1020 /* label_to_block possibly inserted undefined label into the chain. */
1021 if (!label_for_bb[bb->index])
1022 label_for_bb[bb->index] = label;
1023 return label_for_bb[bb->index];
1026 /* Cleanup redundant labels. This is a three-step process:
1027 1) Find the leading label for each block.
1028 2) Redirect all references to labels to the leading labels.
1029 3) Cleanup all useless labels. */
1031 void
1032 cleanup_dead_labels (void)
1034 basic_block bb;
1035 label_for_bb = xcalloc (last_basic_block, sizeof (tree));
1037 /* Find a suitable label for each block. We use the first user-defined
1038 label if there is one, or otherwise just the first label we see. */
1039 FOR_EACH_BB (bb)
1041 block_stmt_iterator i;
1043 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
1045 tree label, stmt = bsi_stmt (i);
1047 if (TREE_CODE (stmt) != LABEL_EXPR)
1048 break;
1050 label = LABEL_EXPR_LABEL (stmt);
1052 /* If we have not yet seen a label for the current block,
1053 remember this one and see if there are more labels. */
1054 if (! label_for_bb[bb->index])
1056 label_for_bb[bb->index] = label;
1057 continue;
1060 /* If we did see a label for the current block already, but it
1061 is an artificially created label, replace it if the current
1062 label is a user defined label. */
1063 if (! DECL_ARTIFICIAL (label)
1064 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
1066 label_for_bb[bb->index] = label;
1067 break;
1072 /* Now redirect all jumps/branches to the selected label.
1073 First do so for each block ending in a control statement. */
1074 FOR_EACH_BB (bb)
1076 tree stmt = last_stmt (bb);
1077 if (!stmt)
1078 continue;
1080 switch (TREE_CODE (stmt))
1082 case COND_EXPR:
1084 tree true_branch, false_branch;
1086 true_branch = COND_EXPR_THEN (stmt);
1087 false_branch = COND_EXPR_ELSE (stmt);
1089 GOTO_DESTINATION (true_branch)
1090 = main_block_label (GOTO_DESTINATION (true_branch));
1091 GOTO_DESTINATION (false_branch)
1092 = main_block_label (GOTO_DESTINATION (false_branch));
1094 break;
1097 case SWITCH_EXPR:
1099 size_t i;
1100 tree vec = SWITCH_LABELS (stmt);
1101 size_t n = TREE_VEC_LENGTH (vec);
1103 /* Replace all destination labels. */
1104 for (i = 0; i < n; ++i)
1106 tree elt = TREE_VEC_ELT (vec, i);
1107 tree label = main_block_label (CASE_LABEL (elt));
1108 CASE_LABEL (elt) = label;
1110 break;
1113 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1114 remove them until after we've created the CFG edges. */
1115 case GOTO_EXPR:
1116 if (! computed_goto_p (stmt))
1118 GOTO_DESTINATION (stmt)
1119 = main_block_label (GOTO_DESTINATION (stmt));
1120 break;
1123 default:
1124 break;
1128 for_each_eh_region (update_eh_label);
1130 /* Finally, purge dead labels. All user-defined labels and labels that
1131 can be the target of non-local gotos are preserved. */
1132 FOR_EACH_BB (bb)
1134 block_stmt_iterator i;
1135 tree label_for_this_bb = label_for_bb[bb->index];
1137 if (! label_for_this_bb)
1138 continue;
1140 for (i = bsi_start (bb); !bsi_end_p (i); )
1142 tree label, stmt = bsi_stmt (i);
1144 if (TREE_CODE (stmt) != LABEL_EXPR)
1145 break;
1147 label = LABEL_EXPR_LABEL (stmt);
1149 if (label == label_for_this_bb
1150 || ! DECL_ARTIFICIAL (label)
1151 || DECL_NONLOCAL (label))
1152 bsi_next (&i);
1153 else
1154 bsi_remove (&i);
1158 free (label_for_bb);
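/* An illustrative example, not from the original file: if a block
   begins with

       <D1234>:     (artificial label)
       mylab:       (user-defined label)
       x = 1;

   then step 1 selects "mylab" as the leading label, step 2 rewrites
   gotos, switch cases and EH region labels that refer to <D1234> so
   they use "mylab", and step 3 deletes <D1234>, which is artificial,
   not the leading label, and not a nonlocal target.  */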
1161 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1162 and scan the sorted vector of cases. Combine the ones jumping to the
1163 same label.
1164 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
1166 void
1167 group_case_labels (void)
1169 basic_block bb;
1171 FOR_EACH_BB (bb)
1173 tree stmt = last_stmt (bb);
1174 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1176 tree labels = SWITCH_LABELS (stmt);
1177 int old_size = TREE_VEC_LENGTH (labels);
1178 int i, j, new_size = old_size;
1179 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1180 tree default_label;
1182 /* The default label is always the last case in a switch
1183 statement after gimplification. */
1184 default_label = CASE_LABEL (default_case);
1186 /* Look for possible opportunities to merge cases.
1187 Ignore the last element of the label vector because it
1188 must be the default case. */
1189 i = 0;
1190 while (i < old_size - 1)
1192 tree base_case, base_label, base_high;
1193 base_case = TREE_VEC_ELT (labels, i);
1195 gcc_assert (base_case);
1196 base_label = CASE_LABEL (base_case);
1198 /* Discard cases that have the same destination as the
1199 default case. */
1200 if (base_label == default_label)
1202 TREE_VEC_ELT (labels, i) = NULL_TREE;
1203 i++;
1204 new_size--;
1205 continue;
1208 base_high = CASE_HIGH (base_case) ?
1209 CASE_HIGH (base_case) : CASE_LOW (base_case);
1210 i++;
1211 /* Try to merge case labels. Break out when we reach the end
1212 of the label vector or when we cannot merge the next case
1213 label with the current one. */
1214 while (i < old_size - 1)
1216 tree merge_case = TREE_VEC_ELT (labels, i);
1217 tree merge_label = CASE_LABEL (merge_case);
1218 tree t = int_const_binop (PLUS_EXPR, base_high,
1219 integer_one_node, 1);
1221 /* Merge the cases if they jump to the same place,
1222 and their ranges are consecutive. */
1223 if (merge_label == base_label
1224 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1226 base_high = CASE_HIGH (merge_case) ?
1227 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1228 CASE_HIGH (base_case) = base_high;
1229 TREE_VEC_ELT (labels, i) = NULL_TREE;
1230 new_size--;
1231 i++;
1233 else
1234 break;
1238 /* Compress the case labels in the label vector, and adjust the
1239 length of the vector. */
1240 for (i = 0, j = 0; i < new_size; i++)
1242 while (! TREE_VEC_ELT (labels, j))
1243 j++;
1244 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1246 TREE_VEC_LENGTH (labels) = new_size;
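/* An illustrative example, not from the original file: given the
   sorted label vector

       case 1: goto L1;  case 2: goto L1;  case 3: goto L1;
       case 7: goto Ld;  default: goto Ld;

   the loop above first drops "case 7" because it shares the default
   label, then folds the three consecutive cases into
   "case 1 ... 3: goto L1;", leaving a two-entry vector: the merged
   range and the default.  */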
1251 /* Checks whether we can merge block B into block A. */
1253 static bool
1254 tree_can_merge_blocks_p (basic_block a, basic_block b)
1256 tree stmt;
1257 block_stmt_iterator bsi;
1259 if (!single_succ_p (a))
1260 return false;
1262 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1263 return false;
1265 if (single_succ (a) != b)
1266 return false;
1268 if (!single_pred_p (b))
1269 return false;
1271 if (b == EXIT_BLOCK_PTR)
1272 return false;
1274 /* If A ends by a statement causing exceptions or something similar, we
1275 cannot merge the blocks. */
1276 stmt = last_stmt (a);
1277 if (stmt && stmt_ends_bb_p (stmt))
1278 return false;
1280 /* Do not allow a block with only a non-local label to be merged. */
1281 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1282 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1283 return false;
1285 /* B must not have any PHI nodes at its start. Most such degenerate
1286 PHI nodes should be cleaned up by kill_redundant_phi_nodes. */
1287 if (phi_nodes (b))
1288 return false;
1290 /* Do not remove user labels. */
1291 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1293 stmt = bsi_stmt (bsi);
1294 if (TREE_CODE (stmt) != LABEL_EXPR)
1295 break;
1296 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1297 return false;
1300 /* Protect the loop latches. */
1301 if (current_loops
1302 && b->loop_father->latch == b)
1303 return false;
1305 return true;
1309 /* Merge block B into block A. */
1311 static void
1312 tree_merge_blocks (basic_block a, basic_block b)
1314 block_stmt_iterator bsi;
1315 tree_stmt_iterator last;
1317 if (dump_file)
1318 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1320 /* Ensure that B follows A. */
1321 move_block_after (b, a);
1323 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1324 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1326 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1327 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1329 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1331 tree label = bsi_stmt (bsi);
1333 bsi_remove (&bsi);
1334 /* Now that we can thread computed gotos, we might have
1335 a situation where we have a forced label in block B
1336 However, the label at the start of block B might still be
1337 used in other ways (think about the runtime checking for
1338 Fortran assigned gotos). So we cannot just delete the label.
1339 label. Instead we move the label to the start of block A. */
1340 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1342 block_stmt_iterator dest_bsi = bsi_start (a);
1343 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1346 else
1348 set_bb_for_stmt (bsi_stmt (bsi), a);
1349 bsi_next (&bsi);
1353 /* Merge the chains. */
1354 last = tsi_last (a->stmt_list);
1355 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1356 b->stmt_list = NULL;
1360 /* Walk the function tree removing unnecessary statements.
1362 * Empty statement nodes are removed
1364 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1366 * Unnecessary COND_EXPRs are removed
1368 * Some unnecessary BIND_EXPRs are removed
1370 Clearly more work could be done. The trick is doing the analysis
1371 and removal fast enough to be a net improvement in compile times.
1373 Note that when we remove a control structure such as a COND_EXPR
1374 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1375 to ensure we eliminate all the useless code. */
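/* An illustrative example, not from the original file: one round of
   this pass performs rewrites such as

       if (0) { foo (); } else { }      -->  (empty statement)
       try { x = 1; } finally { }       -->  x = 1;
       { <BIND_EXPR, no locals> y (); } -->  y ();

   and sets data->repeat, because removing one container frequently
   exposes another removable one for the next iteration.  */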
1377 struct rus_data
1379 tree *last_goto;
1380 bool repeat;
1381 bool may_throw;
1382 bool may_branch;
1383 bool has_label;
1386 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1388 static bool
1389 remove_useless_stmts_warn_notreached (tree stmt)
1391 if (EXPR_HAS_LOCATION (stmt))
1393 location_t loc = EXPR_LOCATION (stmt);
1394 if (LOCATION_LINE (loc) > 0)
1396 warning ("%Hwill never be executed", &loc);
1397 return true;
1401 switch (TREE_CODE (stmt))
1403 case STATEMENT_LIST:
1405 tree_stmt_iterator i;
1406 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1407 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1408 return true;
1410 break;
1412 case COND_EXPR:
1413 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1414 return true;
1415 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1416 return true;
1417 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1418 return true;
1419 break;
1421 case TRY_FINALLY_EXPR:
1422 case TRY_CATCH_EXPR:
1423 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1424 return true;
1425 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1426 return true;
1427 break;
1429 case CATCH_EXPR:
1430 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1431 case EH_FILTER_EXPR:
1432 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1433 case BIND_EXPR:
1434 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1436 default:
1437 /* Not a live container. */
1438 break;
1441 return false;
1444 static void
1445 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1447 tree then_clause, else_clause, cond;
1448 bool save_has_label, then_has_label, else_has_label;
1450 save_has_label = data->has_label;
1451 data->has_label = false;
1452 data->last_goto = NULL;
1454 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1456 then_has_label = data->has_label;
1457 data->has_label = false;
1458 data->last_goto = NULL;
1460 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1462 else_has_label = data->has_label;
1463 data->has_label = save_has_label | then_has_label | else_has_label;
1465 then_clause = COND_EXPR_THEN (*stmt_p);
1466 else_clause = COND_EXPR_ELSE (*stmt_p);
1467 cond = fold (COND_EXPR_COND (*stmt_p));
1469 /* If neither arm does anything at all, we can remove the whole IF. */
1470 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1472 *stmt_p = build_empty_stmt ();
1473 data->repeat = true;
1476 /* If there are no reachable statements in an arm, then we can
1477 zap the entire conditional. */
1478 else if (integer_nonzerop (cond) && !else_has_label)
1480 if (warn_notreached)
1481 remove_useless_stmts_warn_notreached (else_clause);
1482 *stmt_p = then_clause;
1483 data->repeat = true;
1485 else if (integer_zerop (cond) && !then_has_label)
1487 if (warn_notreached)
1488 remove_useless_stmts_warn_notreached (then_clause);
1489 *stmt_p = else_clause;
1490 data->repeat = true;
1493 /* Check a couple of simple things on then/else with single stmts. */
1494 else
1496 tree then_stmt = expr_only (then_clause);
1497 tree else_stmt = expr_only (else_clause);
1499 /* Notice branches to a common destination. */
1500 if (then_stmt && else_stmt
1501 && TREE_CODE (then_stmt) == GOTO_EXPR
1502 && TREE_CODE (else_stmt) == GOTO_EXPR
1503 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1505 *stmt_p = then_stmt;
1506 data->repeat = true;
1509 /* If the THEN/ELSE clause merely assigns a value to a variable or
1510 parameter which is already known to contain that value, then
1511 remove the useless THEN/ELSE clause. */
1512 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1514 if (else_stmt
1515 && TREE_CODE (else_stmt) == MODIFY_EXPR
1516 && TREE_OPERAND (else_stmt, 0) == cond
1517 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1518 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1520 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1521 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1522 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1523 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1525 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1526 ? then_stmt : else_stmt);
1527 tree *location = (TREE_CODE (cond) == EQ_EXPR
1528 ? &COND_EXPR_THEN (*stmt_p)
1529 : &COND_EXPR_ELSE (*stmt_p));
1531 if (stmt
1532 && TREE_CODE (stmt) == MODIFY_EXPR
1533 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1534 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1535 *location = alloc_stmt_list ();
1539 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1540 would be re-introduced during lowering. */
1541 data->last_goto = NULL;
1545 static void
1546 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1548 bool save_may_branch, save_may_throw;
1549 bool this_may_branch, this_may_throw;
1551 /* Collect may_branch and may_throw information for the body only. */
1552 save_may_branch = data->may_branch;
1553 save_may_throw = data->may_throw;
1554 data->may_branch = false;
1555 data->may_throw = false;
1556 data->last_goto = NULL;
1558 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1560 this_may_branch = data->may_branch;
1561 this_may_throw = data->may_throw;
1562 data->may_branch |= save_may_branch;
1563 data->may_throw |= save_may_throw;
1564 data->last_goto = NULL;
1566 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1568 /* If the body is empty, then we can emit the FINALLY block without
1569 the enclosing TRY_FINALLY_EXPR. */
1570 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1572 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1573 data->repeat = true;
1576 /* If the handler is empty, then we can emit the TRY block without
1577 the enclosing TRY_FINALLY_EXPR. */
1578 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1580 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1581 data->repeat = true;
1584 /* If the body neither throws, nor branches, then we can safely
1585 string the TRY and FINALLY blocks together. */
1586 else if (!this_may_branch && !this_may_throw)
1588 tree stmt = *stmt_p;
1589 *stmt_p = TREE_OPERAND (stmt, 0);
1590 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1591 data->repeat = true;
1596 static void
1597 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1599 bool save_may_throw, this_may_throw;
1600 tree_stmt_iterator i;
1601 tree stmt;
1603 /* Collect may_throw information for the body only. */
1604 save_may_throw = data->may_throw;
1605 data->may_throw = false;
1606 data->last_goto = NULL;
1608 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1610 this_may_throw = data->may_throw;
1611 data->may_throw = save_may_throw;
1613 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1614 if (!this_may_throw)
1616 if (warn_notreached)
1617 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1618 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1619 data->repeat = true;
1620 return;
1623 /* Process the catch clause specially. We may be able to tell that
1624 no exceptions propagate past this point. */
1626 this_may_throw = true;
1627 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1628 stmt = tsi_stmt (i);
1629 data->last_goto = NULL;
1631 switch (TREE_CODE (stmt))
1633 case CATCH_EXPR:
1634 for (; !tsi_end_p (i); tsi_next (&i))
1636 stmt = tsi_stmt (i);
1637 /* If we catch all exceptions, then the body does not
1638 propagate exceptions past this point. */
1639 if (CATCH_TYPES (stmt) == NULL)
1640 this_may_throw = false;
1641 data->last_goto = NULL;
1642 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1644 break;
1646 case EH_FILTER_EXPR:
1647 if (EH_FILTER_MUST_NOT_THROW (stmt))
1648 this_may_throw = false;
1649 else if (EH_FILTER_TYPES (stmt) == NULL)
1650 this_may_throw = false;
1651 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1652 break;
1654 default:
1655 /* Otherwise this is a cleanup. */
1656 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1658 /* If the cleanup is empty, then we can emit the TRY block without
1659 the enclosing TRY_CATCH_EXPR. */
1660 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1662 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1663 data->repeat = true;
1665 break;
1667 data->may_throw |= this_may_throw;
1671 static void
1672 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1674 tree block;
1676 /* First remove anything underneath the BIND_EXPR. */
1677 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1679 /* If the BIND_EXPR has no variables, then we can pull everything
1680 up one level and remove the BIND_EXPR, unless this is the toplevel
1681 BIND_EXPR for the current function or an inlined function.
1683 When this situation occurs we will want to apply this
1684 optimization again. */
1685 block = BIND_EXPR_BLOCK (*stmt_p);
1686 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1687 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1688 && (! block
1689 || ! BLOCK_ABSTRACT_ORIGIN (block)
1690 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1691 != FUNCTION_DECL)))
1693 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1694 data->repeat = true;
1699 static void
1700 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1702 tree dest = GOTO_DESTINATION (*stmt_p);
1704 data->may_branch = true;
1705 data->last_goto = NULL;
1707 /* Record the last goto expr, so that we can delete it if unnecessary. */
1708 if (TREE_CODE (dest) == LABEL_DECL)
1709 data->last_goto = stmt_p;
1713 static void
1714 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1716 tree label = LABEL_EXPR_LABEL (*stmt_p);
1718 data->has_label = true;
1720 /* We do want to jump across non-local label receiver code. */
1721 if (DECL_NONLOCAL (label))
1722 data->last_goto = NULL;
1724 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1726 *data->last_goto = build_empty_stmt ();
1727 data->repeat = true;
1730 /* ??? Add something here to delete unused labels. */
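/* An illustrative example, not from the original file: the goto and
   label handlers above cooperate to delete jumps to the immediately
   following label:

       goto L;     <-- remembered in data->last_goto
       L:          <-- matches, so the goto becomes an empty statement
       x = 1;

   Any intervening statement, or a nonlocal label, clears last_goto, so
   only the trivially redundant goto is removed.  */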
1734 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1735 decl. This allows us to eliminate redundant or useless
1736 calls to "const" functions.
1738 The gimplifier already does the same operation, but we may notice that
1739 functions are const or pure only after their calls have been gimplified,
1740 so we need to update the flag. */
1742 static void
1743 update_call_expr_flags (tree call)
1745 tree decl = get_callee_fndecl (call);
1746 if (!decl)
1747 return;
1748 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1749 TREE_SIDE_EFFECTS (call) = 0;
1750 if (TREE_NOTHROW (decl))
1751 TREE_NOTHROW (call) = 1;
1755 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1757 void
1758 notice_special_calls (tree t)
1760 int flags = call_expr_flags (t);
1762 if (flags & ECF_MAY_BE_ALLOCA)
1763 current_function_calls_alloca = true;
1764 if (flags & ECF_RETURNS_TWICE)
1765 current_function_calls_setjmp = true;
1769 /* Clear flags set by notice_special_calls. Used by dead code removal
1770 to update the flags. */
1772 void
1773 clear_special_calls (void)
1775 current_function_calls_alloca = false;
1776 current_function_calls_setjmp = false;
1780 static void
1781 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1783 tree t = *tp, op;
1785 switch (TREE_CODE (t))
1787 case COND_EXPR:
1788 remove_useless_stmts_cond (tp, data);
1789 break;
1791 case TRY_FINALLY_EXPR:
1792 remove_useless_stmts_tf (tp, data);
1793 break;
1795 case TRY_CATCH_EXPR:
1796 remove_useless_stmts_tc (tp, data);
1797 break;
1799 case BIND_EXPR:
1800 remove_useless_stmts_bind (tp, data);
1801 break;
1803 case GOTO_EXPR:
1804 remove_useless_stmts_goto (tp, data);
1805 break;
1807 case LABEL_EXPR:
1808 remove_useless_stmts_label (tp, data);
1809 break;
1811 case RETURN_EXPR:
1812 fold_stmt (tp);
1813 data->last_goto = NULL;
1814 data->may_branch = true;
1815 break;
1817 case CALL_EXPR:
1818 fold_stmt (tp);
1819 data->last_goto = NULL;
1820 notice_special_calls (t);
1821 update_call_expr_flags (t);
1822 if (tree_could_throw_p (t))
1823 data->may_throw = true;
1824 break;
1826 case MODIFY_EXPR:
1827 data->last_goto = NULL;
1828 fold_stmt (tp);
1829 op = get_call_expr_in (t);
1830 if (op)
1832 update_call_expr_flags (op);
1833 notice_special_calls (op);
1835 if (tree_could_throw_p (t))
1836 data->may_throw = true;
1837 break;
1839 case STATEMENT_LIST:
1841 tree_stmt_iterator i = tsi_start (t);
1842 while (!tsi_end_p (i))
1844 t = tsi_stmt (i);
1845 if (IS_EMPTY_STMT (t))
1847 tsi_delink (&i);
1848 continue;
1851 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1853 t = tsi_stmt (i);
1854 if (TREE_CODE (t) == STATEMENT_LIST)
1856 tsi_link_before (&i, t, TSI_SAME_STMT);
1857 tsi_delink (&i);
1859 else
1860 tsi_next (&i);
1863 break;
1864 case ASM_EXPR:
1865 fold_stmt (tp);
1866 data->last_goto = NULL;
1867 break;
1869 default:
1870 data->last_goto = NULL;
1871 break;
1875 static void
1876 remove_useless_stmts (void)
1878 struct rus_data data;
1880 clear_special_calls ();
1884 memset (&data, 0, sizeof (data));
1885 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1887 while (data.repeat);
1891 struct tree_opt_pass pass_remove_useless_stmts =
1893 "useless", /* name */
1894 NULL, /* gate */
1895 remove_useless_stmts, /* execute */
1896 NULL, /* sub */
1897 NULL, /* next */
1898 0, /* static_pass_number */
1899 0, /* tv_id */
1900 PROP_gimple_any, /* properties_required */
1901 0, /* properties_provided */
1902 0, /* properties_destroyed */
1903 0, /* todo_flags_start */
1904 TODO_dump_func, /* todo_flags_finish */
1905 0 /* letter */
1909 /* Remove obviously useless statements in basic block BB. */
1911 static void
1912 cfg_remove_useless_stmts_bb (basic_block bb)
1914 block_stmt_iterator bsi;
1915 tree stmt = NULL_TREE;
1916 tree cond, var = NULL_TREE, val = NULL_TREE;
1917 struct var_ann_d *ann;
1919 /* Check whether we come here from a condition, and if so, get the
1920 condition. */
1921 if (!single_pred_p (bb)
1922 || !(single_pred_edge (bb)->flags
1923 & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
1924 return;
1926 cond = COND_EXPR_COND (last_stmt (single_pred (bb)));
1928 if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1930 var = cond;
1931 val = (single_pred_edge (bb)->flags & EDGE_FALSE_VALUE
1932 ? boolean_false_node : boolean_true_node);
1934 else if (TREE_CODE (cond) == TRUTH_NOT_EXPR
1935 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1936 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL))
1938 var = TREE_OPERAND (cond, 0);
1939 val = (single_pred_edge (bb)->flags & EDGE_FALSE_VALUE
1940 ? boolean_true_node : boolean_false_node);
1942 else
1944 if (single_pred_edge (bb)->flags & EDGE_FALSE_VALUE)
1945 cond = invert_truthvalue (cond);
1946 if (TREE_CODE (cond) == EQ_EXPR
1947 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1948 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1949 && (TREE_CODE (TREE_OPERAND (cond, 1)) == VAR_DECL
1950 || TREE_CODE (TREE_OPERAND (cond, 1)) == PARM_DECL
1951 || TREE_CONSTANT (TREE_OPERAND (cond, 1))))
1953 var = TREE_OPERAND (cond, 0);
1954 val = TREE_OPERAND (cond, 1);
1956 else
1957 return;
1960 /* Only work for normal local variables. */
1961 ann = var_ann (var);
1962 if (!ann
1963 || ann->may_aliases
1964 || TREE_ADDRESSABLE (var))
1965 return;
1967 if (! TREE_CONSTANT (val))
1969 ann = var_ann (val);
1970 if (!ann
1971 || ann->may_aliases
1972 || TREE_ADDRESSABLE (val))
1973 return;
1976 /* Ignore floating-point variables, since comparison behaves oddly for
1977 them. */
1978 if (FLOAT_TYPE_P (TREE_TYPE (var)))
1979 return;
1981 for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
1983 stmt = bsi_stmt (bsi);
1985 /* If the THEN/ELSE clause merely assigns a value to a variable/parameter
1986 which is already known to contain that value, then remove the useless
1987 THEN/ELSE clause. */
1988 if (TREE_CODE (stmt) == MODIFY_EXPR
1989 && TREE_OPERAND (stmt, 0) == var
1990 && operand_equal_p (val, TREE_OPERAND (stmt, 1), 0))
1992 bsi_remove (&bsi);
1993 continue;
1996 /* Invalidate the var if we encounter something that could modify it.
1997 Likewise for the value it was previously set to. Note that we only
1998 consider values that are either a VAR_DECL or PARM_DECL so we
1999 can test for conflict very simply. */
2000 if (TREE_CODE (stmt) == ASM_EXPR
2001 || (TREE_CODE (stmt) == MODIFY_EXPR
2002 && (TREE_OPERAND (stmt, 0) == var
2003 || TREE_OPERAND (stmt, 0) == val)))
2004 return;
2006 bsi_next (&bsi);
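/* An illustrative example, not from the original file: if the single
   predecessor ends in "if (x == 1)" and BB is reached on its true
   edge, then VAR is x and VAL is 1, so a leading "x = 1;" inside BB is
   deleted as redundant.  The scan gives up as soon as an ASM_EXPR or
   an assignment to x (or to VAL's variable) might invalidate that
   knowledge.  */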
2011 /* A CFG-aware version of remove_useless_stmts. */
2013 void
2014 cfg_remove_useless_stmts (void)
2016 basic_block bb;
2018 #ifdef ENABLE_CHECKING
2019 verify_flow_info ();
2020 #endif
2022 FOR_EACH_BB (bb)
2024 cfg_remove_useless_stmts_bb (bb);
2029 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2031 static void
2032 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2034 tree phi;
2036 /* Since this block is no longer reachable, we can just delete all
2037 of its PHI nodes. */
2038 phi = phi_nodes (bb);
2039 while (phi)
2041 tree next = PHI_CHAIN (phi);
2042 remove_phi_node (phi, NULL_TREE);
2043 phi = next;
2046 /* Remove edges to BB's successors. */
2047 while (EDGE_COUNT (bb->succs) > 0)
2048 remove_edge (EDGE_SUCC (bb, 0));
2052 /* Remove statements of basic block BB. */
2054 static void
2055 remove_bb (basic_block bb)
2057 block_stmt_iterator i;
2058 #ifdef USE_MAPPED_LOCATION
2059 source_location loc = UNKNOWN_LOCATION;
2060 #else
2061 source_locus loc = 0;
2062 #endif
2064 if (dump_file)
2066 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2067 if (dump_flags & TDF_DETAILS)
2069 dump_bb (bb, dump_file, 0);
2070 fprintf (dump_file, "\n");
2074 /* If we remove the header or the latch of a loop, mark the loop for
2075 removal by setting its header and latch to NULL. */
2076 if (current_loops)
2078 struct loop *loop = bb->loop_father;
2080 if (loop->latch == bb
2081 || loop->header == bb)
2083 loop->latch = NULL;
2084 loop->header = NULL;
2088 /* Remove all the instructions in the block. */
2089 for (i = bsi_start (bb); !bsi_end_p (i);)
2091 tree stmt = bsi_stmt (i);
2092 if (TREE_CODE (stmt) == LABEL_EXPR
2093 && FORCED_LABEL (LABEL_EXPR_LABEL (stmt)))
2095 basic_block new_bb = bb->prev_bb;
2096 block_stmt_iterator new_bsi = bsi_start (new_bb);
2098 bsi_remove (&i);
2099 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2101 else
2103 release_defs (stmt);
2105 set_bb_for_stmt (stmt, NULL);
2106 bsi_remove (&i);
2109 /* Don't warn for removed gotos. Gotos are often removed due to
2110 jump threading, thus resulting in bogus warnings. Not great,
2111 since this way we lose warnings for gotos in the original
2112 program that are indeed unreachable. */
2113 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2115 #ifdef USE_MAPPED_LOCATION
2116 if (EXPR_HAS_LOCATION (stmt))
2117 loc = EXPR_LOCATION (stmt);
2118 #else
2119 source_locus t;
2120 t = EXPR_LOCUS (stmt);
2121 if (t && LOCATION_LINE (*t) > 0)
2122 loc = t;
2123 #endif
2127 /* If requested, give a warning that the first statement in the
2128 block is unreachable. The loop above walks the statements forwards
2129 and only records a location while LOC is still unset, so LOC refers
2130 to the first statement in the block that has a usable location. */
2131 #ifdef USE_MAPPED_LOCATION
2132 if (warn_notreached && loc != UNKNOWN_LOCATION)
2133 warning ("%Hwill never be executed", &loc);
2134 #else
2135 if (warn_notreached && loc)
2136 warning ("%Hwill never be executed", loc);
2137 #endif
2139 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2142 /* A list of all the noreturn calls passed to modify_stmt.
2143 cleanup_control_flow uses it to detect cases where a mid-block
2144 indirect call has been turned into a noreturn call. When this
2145 happens, all the instructions after the call are no longer
2146 reachable and must be deleted as dead. */
2148 VEC(tree) *modified_noreturn_calls;
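/* For example (a hypothetical GNU C sketch; the function names are only
   illustrative), consider:

     void wrapper (void (*fn) (void))
     {
       fn ();            <-- indirect call, later propagated to abort
       other_work ();    <-- dead once the call is known not to return
     }

   Once the call is discovered to be noreturn it no longer ends its basic
   block, so cleanup_control_flow below splits the block right after the
   call and removes the now-useless fallthru edge, leaving the trailing
   statements unreachable so they can be deleted as dead.  */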
2150 /* Try to remove superfluous control structures. */
2152 static bool
2153 cleanup_control_flow (void)
2155 basic_block bb;
2156 block_stmt_iterator bsi;
2157 bool retval = false;
2158 tree stmt;
2160 /* Detect cases where a mid-block call is now known not to return. */
2161 while (VEC_length (tree, modified_noreturn_calls))
2163 stmt = VEC_pop (tree, modified_noreturn_calls);
2164 bb = bb_for_stmt (stmt);
2165 if (bb != NULL && last_stmt (bb) != stmt && noreturn_call_p (stmt))
2166 split_block (bb, stmt);
2169 FOR_EACH_BB (bb)
2171 bsi = bsi_last (bb);
2173 if (bsi_end_p (bsi))
2174 continue;
2176 stmt = bsi_stmt (bsi);
2177 if (TREE_CODE (stmt) == COND_EXPR
2178 || TREE_CODE (stmt) == SWITCH_EXPR)
2179 retval |= cleanup_control_expr_graph (bb, bsi);
2181 /* If we had a computed goto which has a compile-time determinable
2182 destination, then we can eliminate the goto. */
2183 if (TREE_CODE (stmt) == GOTO_EXPR
2184 && TREE_CODE (GOTO_DESTINATION (stmt)) == ADDR_EXPR
2185 && TREE_CODE (TREE_OPERAND (GOTO_DESTINATION (stmt), 0)) == LABEL_DECL)
2187 edge e;
2188 tree label;
2189 edge_iterator ei;
2190 basic_block target_block;
2191 bool removed_edge = false;
2193 /* First look at all the outgoing edges. Delete any outgoing
2194 edges which do not go to the right block. For the one
2195 edge which goes to the right block, fix up its flags. */
2196 label = TREE_OPERAND (GOTO_DESTINATION (stmt), 0);
2197 target_block = label_to_block (label);
2198 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2200 if (e->dest != target_block)
2202 removed_edge = true;
2203 remove_edge (e);
2205 else
2207 /* Turn off the EDGE_ABNORMAL flag. */
2208 e->flags &= ~EDGE_ABNORMAL;
2210 /* And set EDGE_FALLTHRU. */
2211 e->flags |= EDGE_FALLTHRU;
2212 ei_next (&ei);
2216 /* If we removed one or more edges, then we will need to fix the
2217 dominators. It may be possible to incrementally update them. */
2218 if (removed_edge)
2219 free_dominance_info (CDI_DOMINATORS);
2221 /* Remove the GOTO_EXPR as it is not needed. The CFG has all the
2222 relevant information we need. */
2223 bsi_remove (&bsi);
2224 retval = true;
2227 /* Check for indirect calls that have been turned into
2228 noreturn calls. */
2229 if (noreturn_call_p (stmt) && remove_fallthru_edge (bb->succs))
2231 free_dominance_info (CDI_DOMINATORS);
2232 retval = true;
2235 return retval;
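/* As an example of the computed-goto case handled above, GNU C such as

     void *dest = &&out;
     ...
     goto *dest;
   out:
     ...

   may be simplified by constant propagation into "goto *&&out;".  At that
   point GOTO_DESTINATION is the ADDR_EXPR of a known LABEL_DECL, so every
   outgoing edge except the one to that label's block is removed, the
   surviving edge becomes a plain fallthru, and the GOTO_EXPR itself is
   deleted (the label and variable here are only illustrative).  */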
2239 /* Disconnect an unreachable block in the control expression starting
2240 at block BB. */
2242 static bool
2243 cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
2245 edge taken_edge;
2246 bool retval = false;
2247 tree expr = bsi_stmt (bsi), val;
2249 if (!single_succ_p (bb))
2251 edge e;
2252 edge_iterator ei;
2254 switch (TREE_CODE (expr))
2256 case COND_EXPR:
2257 val = COND_EXPR_COND (expr);
2258 break;
2260 case SWITCH_EXPR:
2261 val = SWITCH_COND (expr);
2262 if (TREE_CODE (val) != INTEGER_CST)
2263 return false;
2264 break;
2266 default:
2267 gcc_unreachable ();
2270 taken_edge = find_taken_edge (bb, val);
2271 if (!taken_edge)
2272 return false;
2274 /* Remove all the edges except the one that is always executed. */
2275 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2277 if (e != taken_edge)
2279 taken_edge->probability += e->probability;
2280 taken_edge->count += e->count;
2281 remove_edge (e);
2282 retval = true;
2284 else
2285 ei_next (&ei);
2287 if (taken_edge->probability > REG_BR_PROB_BASE)
2288 taken_edge->probability = REG_BR_PROB_BASE;
2290 else
2291 taken_edge = single_succ_edge (bb);
2293 bsi_remove (&bsi);
2294 taken_edge->flags = EDGE_FALLTHRU;
2296 /* We removed some paths from the cfg. */
2297 free_dominance_info (CDI_DOMINATORS);
2299 return retval;
2302 /* Remove any fallthru edge from EV. Return true if an edge was removed. */
2304 static bool
2305 remove_fallthru_edge (VEC(edge) *ev)
2307 edge_iterator ei;
2308 edge e;
2310 FOR_EACH_EDGE (e, ei, ev)
2311 if ((e->flags & EDGE_FALLTHRU) != 0)
2313 remove_edge (e);
2314 return true;
2316 return false;
2319 /* Given a basic block BB ending with COND_EXPR, SWITCH_EXPR or a computed
2320 GOTO_EXPR, and a predicate VAL, return the edge that will be taken out of
2321 the block. If VAL does not match a unique edge, NULL is returned. */
2323 edge
2324 find_taken_edge (basic_block bb, tree val)
2326 tree stmt;
2328 stmt = last_stmt (bb);
2330 gcc_assert (stmt);
2331 gcc_assert (is_ctrl_stmt (stmt));
2332 gcc_assert (val);
2334 if (! is_gimple_min_invariant (val))
2335 return NULL;
2337 if (TREE_CODE (stmt) == COND_EXPR)
2338 return find_taken_edge_cond_expr (bb, val);
2340 if (TREE_CODE (stmt) == SWITCH_EXPR)
2341 return find_taken_edge_switch_expr (bb, val);
2343 if (computed_goto_p (stmt))
2344 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2346 gcc_unreachable ();
2349 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2350 statement, determine which of the outgoing edges will be taken out of the
2351 block. Return NULL if any edge may be taken. */
2353 static edge
2354 find_taken_edge_computed_goto (basic_block bb, tree val)
2356 basic_block dest;
2357 edge e = NULL;
2359 dest = label_to_block (val);
2360 if (dest)
2362 e = find_edge (bb, dest);
2363 gcc_assert (e != NULL);
2366 return e;
2369 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2370 statement, determine which of the two edges will be taken out of the
2371 block. Return NULL if either edge may be taken. */
2373 static edge
2374 find_taken_edge_cond_expr (basic_block bb, tree val)
2376 edge true_edge, false_edge;
2378 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2380 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2381 return (zero_p (val) ? false_edge : true_edge);
2384 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2385 statement, determine which edge will be taken out of the block. Return
2386 NULL if any edge may be taken. */
2388 static edge
2389 find_taken_edge_switch_expr (basic_block bb, tree val)
2391 tree switch_expr, taken_case;
2392 basic_block dest_bb;
2393 edge e;
2395 switch_expr = last_stmt (bb);
2396 taken_case = find_case_label_for_value (switch_expr, val);
2397 dest_bb = label_to_block (CASE_LABEL (taken_case));
2399 e = find_edge (bb, dest_bb);
2400 gcc_assert (e);
2401 return e;
2405 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2406 We can make optimal use here of the fact that the case labels are
2407 sorted: We can do a binary search for a case matching VAL. */
2409 static tree
2410 find_case_label_for_value (tree switch_expr, tree val)
2412 tree vec = SWITCH_LABELS (switch_expr);
2413 size_t low, high, n = TREE_VEC_LENGTH (vec);
2414 tree default_case = TREE_VEC_ELT (vec, n - 1);
2416 for (low = -1, high = n - 1; high - low > 1; )
2418 size_t i = (high + low) / 2;
2419 tree t = TREE_VEC_ELT (vec, i);
2420 int cmp;
2422 /* Cache the result of comparing CASE_LOW and val. */
2423 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2425 if (cmp > 0)
2426 high = i;
2427 else
2428 low = i;
2430 if (CASE_HIGH (t) == NULL)
2432 /* A single-valued case label. */
2433 if (cmp == 0)
2434 return t;
2436 else
2438 /* A case range. We can only handle integer ranges. */
2439 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2440 return t;
2444 return default_case;
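/* As a sketch of the contract above, given a sorted case vector such as

     case 1:   case 5 ... 9:   case 42:   default:

   a query with VAL equal to 7 narrows the search interval onto the
   "5 ... 9" range label and returns it, whereas VAL equal to 10 matches
   neither a single-value nor a range test, so the default case (always
   stored last in the vector) is returned instead.  */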
2448 /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
2449 those alternatives are equal in each of the PHI nodes, then return
2450 true, else return false. */
2452 static bool
2453 phi_alternatives_equal (basic_block dest, edge e1, edge e2)
2455 int n1 = e1->dest_idx;
2456 int n2 = e2->dest_idx;
2457 tree phi;
2459 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
2461 tree val1 = PHI_ARG_DEF (phi, n1);
2462 tree val2 = PHI_ARG_DEF (phi, n2);
2464 gcc_assert (val1 != NULL_TREE);
2465 gcc_assert (val2 != NULL_TREE);
2467 if (!operand_equal_for_phi_arg_p (val1, val2))
2468 return false;
2471 return true;
2475 /*---------------------------------------------------------------------------
2476 Debugging functions
2477 ---------------------------------------------------------------------------*/
2479 /* Dump tree-specific information of block BB to file OUTF. */
2481 void
2482 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2484 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2488 /* Dump a basic block on stderr. */
2490 void
2491 debug_tree_bb (basic_block bb)
2493 dump_bb (bb, stderr, 0);
2497 /* Dump basic block with index N on stderr. */
2499 basic_block
2500 debug_tree_bb_n (int n)
2502 debug_tree_bb (BASIC_BLOCK (n));
2503 return BASIC_BLOCK (n);
2507 /* Dump the CFG on stderr.
2509 FLAGS are the same used by the tree dumping functions
2510 (see TDF_* in tree.h). */
2512 void
2513 debug_tree_cfg (int flags)
2515 dump_tree_cfg (stderr, flags);
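/* These entry points are mostly useful from a debugger, e.g. from a GDB
   session stopped inside a tree pass (the block number is illustrative):

     (gdb) call debug_tree_bb_n (4)
     (gdb) call debug_tree_cfg (0)

   where the argument to debug_tree_cfg is a mask of TDF_* flags (such as
   TDF_DETAILS or TDF_STATS).  Both simply forward to the dump routines
   with stderr as the stream.  */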
2519 /* Dump the program showing basic block boundaries on the given FILE.
2521 FLAGS are the same used by the tree dumping functions (see TDF_* in
2522 tree.h). */
2524 void
2525 dump_tree_cfg (FILE *file, int flags)
2527 if (flags & TDF_DETAILS)
2529 const char *funcname
2530 = lang_hooks.decl_printable_name (current_function_decl, 2);
2532 fputc ('\n', file);
2533 fprintf (file, ";; Function %s\n\n", funcname);
2534 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2535 n_basic_blocks, n_edges, last_basic_block);
2537 brief_dump_cfg (file);
2538 fprintf (file, "\n");
2541 if (flags & TDF_STATS)
2542 dump_cfg_stats (file);
2544 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2548 /* Dump CFG statistics on FILE. */
2550 void
2551 dump_cfg_stats (FILE *file)
2553 static long max_num_merged_labels = 0;
2554 unsigned long size, total = 0;
2555 int n_edges;
2556 basic_block bb;
2557 const char * const fmt_str = "%-30s%-13s%12s\n";
2558 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2559 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2560 const char *funcname
2561 = lang_hooks.decl_printable_name (current_function_decl, 2);
2564 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2566 fprintf (file, "---------------------------------------------------------\n");
2567 fprintf (file, fmt_str, "", " Number of ", "Memory");
2568 fprintf (file, fmt_str, "", " instances ", "used ");
2569 fprintf (file, "---------------------------------------------------------\n");
2571 size = n_basic_blocks * sizeof (struct basic_block_def);
2572 total += size;
2573 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2574 SCALE (size), LABEL (size));
2576 n_edges = 0;
2577 FOR_EACH_BB (bb)
2578 n_edges += EDGE_COUNT (bb->succs);
2579 size = n_edges * sizeof (struct edge_def);
2580 total += size;
2581 fprintf (file, fmt_str_1, "Edges", n_edges, SCALE (size), LABEL (size));
2583 size = n_basic_blocks * sizeof (struct bb_ann_d);
2584 total += size;
2585 fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks,
2586 SCALE (size), LABEL (size));
2588 fprintf (file, "---------------------------------------------------------\n");
2589 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2590 LABEL (total));
2591 fprintf (file, "---------------------------------------------------------\n");
2592 fprintf (file, "\n");
2594 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2595 max_num_merged_labels = cfg_stats.num_merged_labels;
2597 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2598 cfg_stats.num_merged_labels, max_num_merged_labels);
2600 fprintf (file, "\n");
2604 /* Dump CFG statistics on stderr. Keep extern so that it's always
2605 linked in the final executable. */
2607 void
2608 debug_cfg_stats (void)
2610 dump_cfg_stats (stderr);
2614 /* Dump the flowgraph to a .vcg FILE. */
2616 static void
2617 tree_cfg2vcg (FILE *file)
2619 edge e;
2620 edge_iterator ei;
2621 basic_block bb;
2622 const char *funcname
2623 = lang_hooks.decl_printable_name (current_function_decl, 2);
2625 /* Write the file header. */
2626 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2627 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2628 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2630 /* Write blocks and edges. */
2631 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2633 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2634 e->dest->index);
2636 if (e->flags & EDGE_FAKE)
2637 fprintf (file, " linestyle: dotted priority: 10");
2638 else
2639 fprintf (file, " linestyle: solid priority: 100");
2641 fprintf (file, " }\n");
2643 fputc ('\n', file);
2645 FOR_EACH_BB (bb)
2647 enum tree_code head_code, end_code;
2648 const char *head_name, *end_name;
2649 int head_line = 0;
2650 int end_line = 0;
2651 tree first = first_stmt (bb);
2652 tree last = last_stmt (bb);
2654 if (first)
2656 head_code = TREE_CODE (first);
2657 head_name = tree_code_name[head_code];
2658 head_line = get_lineno (first);
2660 else
2661 head_name = "no-statement";
2663 if (last)
2665 end_code = TREE_CODE (last);
2666 end_name = tree_code_name[end_code];
2667 end_line = get_lineno (last);
2669 else
2670 end_name = "no-statement";
2672 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2673 bb->index, bb->index, head_name, head_line, end_name,
2674 end_line);
2676 FOR_EACH_EDGE (e, ei, bb->succs)
2678 if (e->dest == EXIT_BLOCK_PTR)
2679 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2680 else
2681 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2683 if (e->flags & EDGE_FAKE)
2684 fprintf (file, " priority: 10 linestyle: dotted");
2685 else
2686 fprintf (file, " priority: 100 linestyle: solid");
2688 fprintf (file, " }\n");
2691 if (bb->next_bb != EXIT_BLOCK_PTR)
2692 fputc ('\n', file);
2695 fputs ("}\n\n", file);
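/* The emitted VCG text looks roughly like the following sketch; the block
   indices, statement names and line numbers depend on the function dumped:

     graph: { title: "foo"
     node: { title: "ENTRY" label: "ENTRY" }
     node: { title: "EXIT" label: "EXIT" }
     edge: { sourcename: "ENTRY" targetname: "2" linestyle: solid priority: 100 }

     node: { title: "2" label: "#2\nmodify_expr (10)\ncond_expr (12)"}
     edge: { sourcename: "2" targetname: "3" priority: 100 linestyle: solid }
     edge: { sourcename: "2" targetname: "EXIT" priority: 100 linestyle: solid }
     }

   Tools that understand the VCG format (e.g. xvcg) can then lay out and
   display the flowgraph.  */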
2700 /*---------------------------------------------------------------------------
2701 Miscellaneous helpers
2702 ---------------------------------------------------------------------------*/
2704 /* Return true if T represents a stmt that always transfers control. */
2706 bool
2707 is_ctrl_stmt (tree t)
2709 return (TREE_CODE (t) == COND_EXPR
2710 || TREE_CODE (t) == SWITCH_EXPR
2711 || TREE_CODE (t) == GOTO_EXPR
2712 || TREE_CODE (t) == RETURN_EXPR
2713 || TREE_CODE (t) == RESX_EXPR);
2717 /* Return true if T is a statement that may alter the flow of control
2718 (e.g., a call to a non-returning function). */
2720 bool
2721 is_ctrl_altering_stmt (tree t)
2723 tree call;
2725 gcc_assert (t);
2726 call = get_call_expr_in (t);
2727 if (call)
2729 /* A non-pure/const CALL_EXPR alters flow control if the current
2730 function has nonlocal labels. */
2731 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2732 return true;
2734 /* A CALL_EXPR also alters control flow if it does not return. */
2735 if (call_expr_flags (call) & ECF_NORETURN)
2736 return true;
2739 /* If a statement can throw, it alters control flow. */
2740 return tree_can_throw_internal (t);
2744 /* Return true if T is a computed goto. */
2746 bool
2747 computed_goto_p (tree t)
2749 return (TREE_CODE (t) == GOTO_EXPR
2750 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2754 /* Checks whether EXPR is a simple local goto. */
2756 bool
2757 simple_goto_p (tree expr)
2759 return (TREE_CODE (expr) == GOTO_EXPR
2760 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
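/* A GNU C sketch of the distinction drawn by the two predicates above
   (the function, label and parameter names are only illustrative):

     void f (void *p)
     {
       goto done;     <-- GOTO_EXPR with a LABEL_DECL destination: simple_goto_p
       goto *p;       <-- destination is not a LABEL_DECL: computed_goto_p
     done:
       return;
     }

   Only the former can be resolved to a single CFG edge when the CFG is
   built; the latter gets abnormal edges to every possible target.  */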
2764 /* Return true if T should start a new basic block. PREV_T is the
2765 statement preceding T. It is used when T is a label or a case label.
2766 Labels should only start a new basic block if their previous statement
2767 wasn't a label. Otherwise, a sequence of labels would generate
2768 unnecessary basic blocks that only contain a single label. */
2770 static inline bool
2771 stmt_starts_bb_p (tree t, tree prev_t)
2773 if (t == NULL_TREE)
2774 return false;
2776 /* LABEL_EXPRs start a new basic block only if the preceding
2777 statement wasn't a label of the same type. This prevents the
2778 creation of consecutive blocks that have nothing but a single
2779 label. */
2780 if (TREE_CODE (t) == LABEL_EXPR)
2782 /* Nonlocal and computed GOTO targets always start a new block. */
2783 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2784 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2785 return true;
2787 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2789 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2790 return true;
2792 cfg_stats.num_merged_labels++;
2793 return false;
2795 else
2796 return true;
2799 return false;
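/* For instance, in a GIMPLE sequence like

     L1:
     L2:
     L3:
       x = 1;

   only L1 starts a new basic block; L2 and L3 are folded into the same
   block and counted in cfg_stats.num_merged_labels, unless one of them is
   nonlocal or forced (address taken), in which case it must start a block
   of its own.  The labels here are only illustrative.  */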
2803 /* Return true if T should end a basic block. */
2805 bool
2806 stmt_ends_bb_p (tree t)
2808 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2812 /* Add gotos that used to be represented implicitly in the CFG. */
2814 void
2815 disband_implicit_edges (void)
2817 basic_block bb;
2818 block_stmt_iterator last;
2819 edge e;
2820 edge_iterator ei;
2821 tree stmt, label;
2823 FOR_EACH_BB (bb)
2825 last = bsi_last (bb);
2826 stmt = last_stmt (bb);
2828 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2830 /* Remove superfluous gotos from COND_EXPR branches. Moved
2831 from cfg_remove_useless_stmts here since it violates the
2832 invariants for tree--cfg correspondence and thus fits better
2833 here where we do it anyway. */
2834 e = find_edge (bb, bb->next_bb);
2835 if (e)
2837 if (e->flags & EDGE_TRUE_VALUE)
2838 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2839 else if (e->flags & EDGE_FALSE_VALUE)
2840 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2841 else
2842 gcc_unreachable ();
2843 e->flags |= EDGE_FALLTHRU;
2846 continue;
2849 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2851 /* Remove the RETURN_EXPR if we may fall through to the exit
2852 instead. */
2853 gcc_assert (single_succ_p (bb));
2854 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2856 if (bb->next_bb == EXIT_BLOCK_PTR
2857 && !TREE_OPERAND (stmt, 0))
2859 bsi_remove (&last);
2860 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2862 continue;
2865 /* There can be no fallthru edge if the last statement is a control
2866 one. */
2867 if (stmt && is_ctrl_stmt (stmt))
2868 continue;
2870 /* Find a fallthru edge and emit the goto if necessary. */
2871 FOR_EACH_EDGE (e, ei, bb->succs)
2872 if (e->flags & EDGE_FALLTHRU)
2873 break;
2875 if (!e || e->dest == bb->next_bb)
2876 continue;
2878 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2879 label = tree_block_label (e->dest);
2881 stmt = build1 (GOTO_EXPR, void_type_node, label);
2882 #ifdef USE_MAPPED_LOCATION
2883 SET_EXPR_LOCATION (stmt, e->goto_locus);
2884 #else
2885 SET_EXPR_LOCUS (stmt, e->goto_locus);
2886 #endif
2887 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2888 e->flags &= ~EDGE_FALLTHRU;
2892 /* Remove block annotations and other data structures. */
2894 void
2895 delete_tree_cfg_annotations (void)
2897 basic_block bb;
2898 if (n_basic_blocks > 0)
2899 free_blocks_annotations ();
2901 label_to_block_map = NULL;
2902 free_rbi_pool ();
2903 FOR_EACH_BB (bb)
2904 bb->rbi = NULL;
2908 /* Return the first statement in basic block BB. */
2910 tree
2911 first_stmt (basic_block bb)
2913 block_stmt_iterator i = bsi_start (bb);
2914 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2918 /* Return the last statement in basic block BB. */
2920 tree
2921 last_stmt (basic_block bb)
2923 block_stmt_iterator b = bsi_last (bb);
2924 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2928 /* Return a pointer to the last statement in block BB. */
2930 tree *
2931 last_stmt_ptr (basic_block bb)
2933 block_stmt_iterator last = bsi_last (bb);
2934 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2938 /* Return the last statement of an otherwise empty block. Return NULL
2939 if the block is totally empty, or if it contains more than one
2940 statement. */
2942 tree
2943 last_and_only_stmt (basic_block bb)
2945 block_stmt_iterator i = bsi_last (bb);
2946 tree last, prev;
2948 if (bsi_end_p (i))
2949 return NULL_TREE;
2951 last = bsi_stmt (i);
2952 bsi_prev (&i);
2953 if (bsi_end_p (i))
2954 return last;
2956 /* Empty statements should no longer appear in the instruction stream.
2957 Everything that might have appeared before should be deleted by
2958 remove_useless_stmts, and the optimizers should just bsi_remove
2959 instead of smashing with build_empty_stmt.
2961 Thus the only thing that should appear here in a block containing
2962 one executable statement is a label. */
2963 prev = bsi_stmt (i);
2964 if (TREE_CODE (prev) == LABEL_EXPR)
2965 return last;
2966 else
2967 return NULL_TREE;
2971 /* Mark BB as the basic block holding statement T. */
2973 void
2974 set_bb_for_stmt (tree t, basic_block bb)
2976 if (TREE_CODE (t) == PHI_NODE)
2977 PHI_BB (t) = bb;
2978 else if (TREE_CODE (t) == STATEMENT_LIST)
2980 tree_stmt_iterator i;
2981 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2982 set_bb_for_stmt (tsi_stmt (i), bb);
2984 else
2986 stmt_ann_t ann = get_stmt_ann (t);
2987 ann->bb = bb;
2989 /* If the statement is a label, add the label to the label-to-block map
2990 so that we can speed up edge creation for GOTO_EXPRs. */
2991 if (TREE_CODE (t) == LABEL_EXPR)
2993 int uid;
2995 t = LABEL_EXPR_LABEL (t);
2996 uid = LABEL_DECL_UID (t);
2997 if (uid == -1)
2999 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
3000 if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
3001 VARRAY_GROW (label_to_block_map, 3 * uid / 2);
3003 else
3004 /* We're moving an existing label. Make sure that we've
3005 removed it from the old block. */
3006 gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
3007 VARRAY_BB (label_to_block_map, uid) = bb;
3012 /* Finds iterator for STMT. */
3014 extern block_stmt_iterator
3015 bsi_for_stmt (tree stmt)
3017 block_stmt_iterator bsi;
3019 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
3020 if (bsi_stmt (bsi) == stmt)
3021 return bsi;
3023 gcc_unreachable ();
3026 /* Insert statement (or statement list) T before the statement
3027 pointed-to by iterator I. M specifies how to update iterator I
3028 after insertion (see enum bsi_iterator_update). */
3030 void
3031 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
3033 set_bb_for_stmt (t, i->bb);
3034 tsi_link_before (&i->tsi, t, m);
3035 modify_stmt (t);
3039 /* Insert statement (or statement list) T after the statement
3040 pointed-to by iterator I. M specifies how to update iterator I
3041 after insertion (see enum bsi_iterator_update). */
3043 void
3044 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
3046 set_bb_for_stmt (t, i->bb);
3047 tsi_link_after (&i->tsi, t, m);
3048 modify_stmt (t);
3052 /* Remove the statement pointed to by iterator I. The iterator is updated
3053 to the next statement. */
3055 void
3056 bsi_remove (block_stmt_iterator *i)
3058 tree t = bsi_stmt (*i);
3059 set_bb_for_stmt (t, NULL);
3060 tsi_delink (&i->tsi);
3064 /* Move the statement at FROM so it comes right after the statement at TO. */
3066 void
3067 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
3069 tree stmt = bsi_stmt (*from);
3070 bsi_remove (from);
3071 bsi_insert_after (to, stmt, BSI_SAME_STMT);
3075 /* Move the statement at FROM so it comes right before the statement at TO. */
3077 void
3078 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
3080 tree stmt = bsi_stmt (*from);
3081 bsi_remove (from);
3082 bsi_insert_before (to, stmt, BSI_SAME_STMT);
3086 /* Move the statement at FROM to the end of basic block BB. */
3088 void
3089 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
3091 block_stmt_iterator last = bsi_last (bb);
3093 /* Have to check bsi_end_p because it could be an empty block. */
3094 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
3095 bsi_move_before (from, &last);
3096 else
3097 bsi_move_after (from, &last);
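/* A minimal usage sketch for the iterator primitives above (BB and STMT
   stand for values the caller already has; STMT is assumed to be a valid
   GIMPLE statement not yet linked into a block):

     block_stmt_iterator bsi = bsi_last (bb);

     if (!bsi_end_p (bsi) && is_ctrl_stmt (bsi_stmt (bsi)))
       bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
     else
       bsi_insert_after (&bsi, stmt, BSI_SAME_STMT);

   This appends STMT to BB while keeping any control statement last, the
   same policy bsi_move_to_bb_end applies to an existing statement.  */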
3101 /* Replace the contents of the statement pointed to by iterator BSI
3102 with STMT. If PRESERVE_EH_INFO is true, the exception handling
3103 information of the original statement is preserved. */
3105 void
3106 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
3108 int eh_region;
3109 tree orig_stmt = bsi_stmt (*bsi);
3111 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
3112 set_bb_for_stmt (stmt, bsi->bb);
3114 /* Preserve EH region information from the original statement, if
3115 requested by the caller. */
3116 if (preserve_eh_info)
3118 eh_region = lookup_stmt_eh_region (orig_stmt);
3119 if (eh_region >= 0)
3120 add_stmt_to_eh_region (stmt, eh_region);
3123 *bsi_stmt_ptr (*bsi) = stmt;
3124 modify_stmt (stmt);
3128 /* Insert the statement pointed-to by BSI into edge E. Every attempt
3129 is made to place the statement in an existing basic block, but
3130 sometimes that isn't possible. When it isn't possible, the edge is
3131 split and the statement is added to the new block.
3133 In all cases, the returned *BSI points to the correct location. The
3134 return value is true if insertion should be done after the location,
3135 or false if it should be done before the location. If new basic block
3136 has to be created, it is stored in *NEW_BB. */
3138 static bool
3139 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
3140 basic_block *new_bb)
3142 basic_block dest, src;
3143 tree tmp;
3145 dest = e->dest;
3146 restart:
3148 /* If the destination has one predecessor which has no PHI nodes,
3149 insert there. Except for the exit block.
3151 The requirement for no PHI nodes could be relaxed. Basically we
3152 would have to examine the PHIs to prove that none of them used
3153 the value set by the statement we want to insert on E. That
3154 hardly seems worth the effort. */
3155 if (single_pred_p (dest)
3156 && ! phi_nodes (dest)
3157 && dest != EXIT_BLOCK_PTR)
3159 *bsi = bsi_start (dest);
3160 if (bsi_end_p (*bsi))
3161 return true;
3163 /* Make sure we insert after any leading labels. */
3164 tmp = bsi_stmt (*bsi);
3165 while (TREE_CODE (tmp) == LABEL_EXPR)
3167 bsi_next (bsi);
3168 if (bsi_end_p (*bsi))
3169 break;
3170 tmp = bsi_stmt (*bsi);
3173 if (bsi_end_p (*bsi))
3175 *bsi = bsi_last (dest);
3176 return true;
3178 else
3179 return false;
3182 /* If the source has one successor, the edge is not abnormal and
3183 the last statement does not end a basic block, insert there.
3184 Except for the entry block. */
3185 src = e->src;
3186 if ((e->flags & EDGE_ABNORMAL) == 0
3187 && single_succ_p (src)
3188 && src != ENTRY_BLOCK_PTR)
3190 *bsi = bsi_last (src);
3191 if (bsi_end_p (*bsi))
3192 return true;
3194 tmp = bsi_stmt (*bsi);
3195 if (!stmt_ends_bb_p (tmp))
3196 return true;
3198 /* Insert code just before returning the value. We may need to decompose
3199 the return in case it contains a non-trivial operand. */
3200 if (TREE_CODE (tmp) == RETURN_EXPR)
3202 tree op = TREE_OPERAND (tmp, 0);
3203 if (!is_gimple_val (op))
3205 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3206 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3207 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3209 bsi_prev (bsi);
3210 return true;
3214 /* Otherwise, create a new basic block, and split this edge. */
3215 dest = split_edge (e);
3216 if (new_bb)
3217 *new_bb = dest;
3218 e = single_pred_edge (dest);
3219 goto restart;
3223 /* This routine will commit all pending edge insertions, creating any new
3224 basic blocks which are necessary. */
3226 void
3227 bsi_commit_edge_inserts (void)
3229 basic_block bb;
3230 edge e;
3231 edge_iterator ei;
3233 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3235 FOR_EACH_BB (bb)
3236 FOR_EACH_EDGE (e, ei, bb->succs)
3237 bsi_commit_one_edge_insert (e, NULL);
3241 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3242 to this block, otherwise set it to NULL. */
3244 void
3245 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3247 if (new_bb)
3248 *new_bb = NULL;
3249 if (PENDING_STMT (e))
3251 block_stmt_iterator bsi;
3252 tree stmt = PENDING_STMT (e);
3254 PENDING_STMT (e) = NULL_TREE;
3256 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3257 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3258 else
3259 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3264 /* Add STMT to the pending list of edge E. No actual insertion is
3265 made until a call to bsi_commit_edge_inserts () is made. */
3267 void
3268 bsi_insert_on_edge (edge e, tree stmt)
3270 append_to_statement_list (stmt, &PENDING_STMT (e));
3273 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3274 block has to be created, it is returned. */
3276 basic_block
3277 bsi_insert_on_edge_immediate (edge e, tree stmt)
3279 block_stmt_iterator bsi;
3280 basic_block new_bb = NULL;
3282 gcc_assert (!PENDING_STMT (e));
3284 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3285 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3286 else
3287 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3289 return new_bb;
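/* A minimal sketch of the two-phase protocol (E and the statements are
   assumed to come from the caller):

     bsi_insert_on_edge (e, stmt1);     <-- queue as many as needed
     bsi_insert_on_edge (e, stmt2);
     ...
     bsi_commit_edge_inserts ();        <-- materialize them all at once

   The commit step goes through tree_find_edge_insert_loc, so an edge is
   split only when no existing block can hold its queued statements;
   bsi_insert_on_edge_immediate is the eager variant for callers that need
   the possibly created block right away.  */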
3292 /*---------------------------------------------------------------------------
3293 Tree specific functions for CFG manipulation
3294 ---------------------------------------------------------------------------*/
3296 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3298 static void
3299 reinstall_phi_args (edge new_edge, edge old_edge)
3301 tree var, phi;
3303 if (!PENDING_STMT (old_edge))
3304 return;
3306 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3307 var && phi;
3308 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3310 tree result = TREE_PURPOSE (var);
3311 tree arg = TREE_VALUE (var);
3313 gcc_assert (result == PHI_RESULT (phi));
3315 add_phi_arg (phi, arg, new_edge);
3318 PENDING_STMT (old_edge) = NULL;
3321 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3322 Abort on abnormal edges. */
3324 static basic_block
3325 tree_split_edge (edge edge_in)
3327 basic_block new_bb, after_bb, dest, src;
3328 edge new_edge, e;
3330 /* Abnormal edges cannot be split. */
3331 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3333 src = edge_in->src;
3334 dest = edge_in->dest;
3336 /* Place the new block in the block list. Try to keep the new block
3337 near its "logical" location. This is of most help to humans looking
3338 at debugging dumps. */
3339 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3340 after_bb = edge_in->src;
3341 else
3342 after_bb = dest->prev_bb;
3344 new_bb = create_empty_bb (after_bb);
3345 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3346 new_bb->count = edge_in->count;
3347 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3348 new_edge->probability = REG_BR_PROB_BASE;
3349 new_edge->count = edge_in->count;
3351 e = redirect_edge_and_branch (edge_in, new_bb);
3352 gcc_assert (e);
3353 reinstall_phi_args (new_edge, e);
3355 return new_bb;
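/* Splitting an edge E from SRC to DEST therefore produces

     SRC -> NEW_BB -> DEST

   where NEW_BB starts out empty, inherits E's count and frequency, gets an
   unconditional fallthru edge to DEST with probability REG_BR_PROB_BASE,
   and receives (via reinstall_phi_args) any PHI arguments that had been
   queued on the redirected edge.  */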
3359 /* Return true when BB has label LABEL in it. */
3361 static bool
3362 has_label_p (basic_block bb, tree label)
3364 block_stmt_iterator bsi;
3366 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3368 tree stmt = bsi_stmt (bsi);
3370 if (TREE_CODE (stmt) != LABEL_EXPR)
3371 return false;
3372 if (LABEL_EXPR_LABEL (stmt) == label)
3373 return true;
3375 return false;
3379 /* Callback for walk_tree; verify that every element whose address is taken
3380 is properly marked as addressable. DATA is non-null if *TP was seen
3381 inside a PHI node. */
3383 static tree
3384 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3386 tree t = *tp, x;
3387 bool in_phi = (data != NULL);
3389 if (TYPE_P (t))
3390 *walk_subtrees = 0;
3392 /* Check operand N for being valid GIMPLE and give error MSG if not.
3393 We check for constants explicitly since they are not considered
3394 gimple invariants if they overflowed. */
3395 #define CHECK_OP(N, MSG) \
3396 do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N)) \
3397 && !is_gimple_val (TREE_OPERAND (t, N))) \
3398 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3400 switch (TREE_CODE (t))
3402 case SSA_NAME:
3403 if (SSA_NAME_IN_FREE_LIST (t))
3405 error ("SSA name in freelist but still referenced");
3406 return *tp;
3408 break;
3410 case MODIFY_EXPR:
3411 x = TREE_OPERAND (t, 0);
3412 if (TREE_CODE (x) == BIT_FIELD_REF
3413 && is_gimple_reg (TREE_OPERAND (x, 0)))
3415 error ("GIMPLE register modified with BIT_FIELD_REF");
3416 return t;
3418 break;
3420 case ADDR_EXPR:
3421 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3422 dead PHIs that take the address of something. But if the PHI
3423 result is dead, the fact that it takes the address of anything
3424 is irrelevant. Because we can not tell from here if a PHI result
3425 is dead, we just skip this check for PHIs altogether. This means
3426 we may be missing "valid" checks, but what can you do?
3427 This was PR19217. */
3428 if (in_phi)
3429 break;
3431 /* Skip any references (they will be checked when we recurse down the
3432 tree) and ensure that any variable used as a prefix is marked
3433 addressable. */
3434 for (x = TREE_OPERAND (t, 0);
3435 handled_component_p (x);
3436 x = TREE_OPERAND (x, 0))
3439 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3440 return NULL;
3441 if (!TREE_ADDRESSABLE (x))
3443 error ("address taken, but ADDRESSABLE bit not set");
3444 return x;
3446 break;
3448 case COND_EXPR:
3449 x = COND_EXPR_COND (t);
3450 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3452 error ("non-boolean used in condition");
3453 return x;
3455 break;
3457 case NOP_EXPR:
3458 case CONVERT_EXPR:
3459 case FIX_TRUNC_EXPR:
3460 case FIX_CEIL_EXPR:
3461 case FIX_FLOOR_EXPR:
3462 case FIX_ROUND_EXPR:
3463 case FLOAT_EXPR:
3464 case NEGATE_EXPR:
3465 case ABS_EXPR:
3466 case BIT_NOT_EXPR:
3467 case NON_LVALUE_EXPR:
3468 case TRUTH_NOT_EXPR:
3469 CHECK_OP (0, "Invalid operand to unary operator");
3470 break;
3472 case REALPART_EXPR:
3473 case IMAGPART_EXPR:
3474 case COMPONENT_REF:
3475 case ARRAY_REF:
3476 case ARRAY_RANGE_REF:
3477 case BIT_FIELD_REF:
3478 case VIEW_CONVERT_EXPR:
3479 /* We have a nest of references. Verify that each of the operands
3480 that determine where to reference is either a constant or a variable,
3481 verify that the base is valid, and then show we've already checked
3482 the subtrees. */
3483 while (handled_component_p (t))
3485 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3486 CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
3487 else if (TREE_CODE (t) == ARRAY_REF
3488 || TREE_CODE (t) == ARRAY_RANGE_REF)
3490 CHECK_OP (1, "Invalid array index.");
3491 if (TREE_OPERAND (t, 2))
3492 CHECK_OP (2, "Invalid array lower bound.");
3493 if (TREE_OPERAND (t, 3))
3494 CHECK_OP (3, "Invalid array stride.");
3496 else if (TREE_CODE (t) == BIT_FIELD_REF)
3498 CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
3499 CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
3502 t = TREE_OPERAND (t, 0);
3505 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3507 error ("Invalid reference prefix.");
3508 return t;
3510 *walk_subtrees = 0;
3511 break;
3513 case LT_EXPR:
3514 case LE_EXPR:
3515 case GT_EXPR:
3516 case GE_EXPR:
3517 case EQ_EXPR:
3518 case NE_EXPR:
3519 case UNORDERED_EXPR:
3520 case ORDERED_EXPR:
3521 case UNLT_EXPR:
3522 case UNLE_EXPR:
3523 case UNGT_EXPR:
3524 case UNGE_EXPR:
3525 case UNEQ_EXPR:
3526 case LTGT_EXPR:
3527 case PLUS_EXPR:
3528 case MINUS_EXPR:
3529 case MULT_EXPR:
3530 case TRUNC_DIV_EXPR:
3531 case CEIL_DIV_EXPR:
3532 case FLOOR_DIV_EXPR:
3533 case ROUND_DIV_EXPR:
3534 case TRUNC_MOD_EXPR:
3535 case CEIL_MOD_EXPR:
3536 case FLOOR_MOD_EXPR:
3537 case ROUND_MOD_EXPR:
3538 case RDIV_EXPR:
3539 case EXACT_DIV_EXPR:
3540 case MIN_EXPR:
3541 case MAX_EXPR:
3542 case LSHIFT_EXPR:
3543 case RSHIFT_EXPR:
3544 case LROTATE_EXPR:
3545 case RROTATE_EXPR:
3546 case BIT_IOR_EXPR:
3547 case BIT_XOR_EXPR:
3548 case BIT_AND_EXPR:
3549 CHECK_OP (0, "Invalid operand to binary operator");
3550 CHECK_OP (1, "Invalid operand to binary operator");
3551 break;
3553 default:
3554 break;
3556 return NULL;
3558 #undef CHECK_OP
3562 /* Verify STMT, return true if STMT is not in GIMPLE form.
3563 TODO: Implement type checking. */
3565 static bool
3566 verify_stmt (tree stmt, bool last_in_block)
3568 tree addr;
3570 if (!is_gimple_stmt (stmt))
3572 error ("Is not a valid GIMPLE statement.");
3573 goto fail;
3576 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3577 if (addr)
3579 debug_generic_stmt (addr);
3580 return true;
3583 /* If the statement is marked as part of an EH region, then it is
3584 expected that the statement could throw. Verify that when we
3585 have optimizations that simplify statements such that we prove
3586 that they cannot throw, that we update other data structures
3587 to match. */
3588 if (lookup_stmt_eh_region (stmt) >= 0)
3590 if (!tree_could_throw_p (stmt))
3592 error ("Statement marked for throw, but doesn%'t.");
3593 goto fail;
3595 if (!last_in_block && tree_can_throw_internal (stmt))
3597 error ("Statement marked for throw in middle of block.");
3598 goto fail;
3602 return false;
3604 fail:
3605 debug_generic_stmt (stmt);
3606 return true;
3610 /* Return true when the T can be shared. */
3612 static bool
3613 tree_node_can_be_shared (tree t)
3615 if (IS_TYPE_OR_DECL_P (t)
3616 /* We check for constants explicitly since they are not considered
3617 gimple invariants if they overflowed. */
3618 || CONSTANT_CLASS_P (t)
3619 || is_gimple_min_invariant (t)
3620 || TREE_CODE (t) == SSA_NAME
3621 || t == error_mark_node)
3622 return true;
3624 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3625 return true;
3627 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3628 /* We check for constants explicitly since they are not considered
3629 gimple invariants if they overflowed. */
3630 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3631 || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3632 || (TREE_CODE (t) == COMPONENT_REF
3633 || TREE_CODE (t) == REALPART_EXPR
3634 || TREE_CODE (t) == IMAGPART_EXPR))
3635 t = TREE_OPERAND (t, 0);
3637 if (DECL_P (t))
3638 return true;
3640 return false;
3644 /* Called via walk_trees. Verify tree sharing. */
3646 static tree
3647 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3649 htab_t htab = (htab_t) data;
3650 void **slot;
3652 if (tree_node_can_be_shared (*tp))
3654 *walk_subtrees = false;
3655 return NULL;
3658 slot = htab_find_slot (htab, *tp, INSERT);
3659 if (*slot)
3660 return *slot;
3661 *slot = *tp;
3663 return NULL;
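/* A minimal sketch of how the callback above is driven, mirroring its use
   in verify_stmts below (VISITED and DUP are illustrative names):

     htab_t visited = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
     tree dup = walk_tree (&stmt, verify_node_sharing, visited, NULL);

     if (dup)
       error ("Incorrect sharing of tree nodes");

   Every unshareable node is entered into the pointer hash table the first
   time it is seen, so reaching it a second time returns it as DUP.  */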
3667 /* Verify the GIMPLE statement chain. */
3669 void
3670 verify_stmts (void)
3672 basic_block bb;
3673 block_stmt_iterator bsi;
3674 bool err = false;
3675 htab_t htab;
3676 tree addr;
3678 timevar_push (TV_TREE_STMT_VERIFY);
3679 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3681 FOR_EACH_BB (bb)
3683 tree phi;
3684 int i;
3686 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3688 int phi_num_args = PHI_NUM_ARGS (phi);
3690 if (bb_for_stmt (phi) != bb)
3692 error ("bb_for_stmt (phi) is set to a wrong basic block\n");
3693 err |= true;
3696 for (i = 0; i < phi_num_args; i++)
3698 tree t = PHI_ARG_DEF (phi, i);
3699 tree addr;
3701 /* Addressable variables do have SSA_NAMEs but they
3702 are not considered gimple values. */
3703 if (TREE_CODE (t) != SSA_NAME
3704 && TREE_CODE (t) != FUNCTION_DECL
3705 && !is_gimple_val (t))
3707 error ("PHI def is not a GIMPLE value");
3708 debug_generic_stmt (phi);
3709 debug_generic_stmt (t);
3710 err |= true;
3713 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3714 if (addr)
3716 debug_generic_stmt (addr);
3717 err |= true;
3720 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3721 if (addr)
3723 error ("Incorrect sharing of tree nodes");
3724 debug_generic_stmt (phi);
3725 debug_generic_stmt (addr);
3726 err |= true;
3731 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3733 tree stmt = bsi_stmt (bsi);
3735 if (bb_for_stmt (stmt) != bb)
3737 error ("bb_for_stmt (stmt) is set to a wrong basic block\n");
3738 err |= true;
3741 bsi_next (&bsi);
3742 err |= verify_stmt (stmt, bsi_end_p (bsi));
3743 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3744 if (addr)
3746 error ("Incorrect sharing of tree nodes");
3747 debug_generic_stmt (stmt);
3748 debug_generic_stmt (addr);
3749 err |= true;
3754 if (err)
3755 internal_error ("verify_stmts failed.");
3757 htab_delete (htab);
3758 timevar_pop (TV_TREE_STMT_VERIFY);
3762 /* Verifies that the flow information is OK. */
3764 static int
3765 tree_verify_flow_info (void)
3767 int err = 0;
3768 basic_block bb;
3769 block_stmt_iterator bsi;
3770 tree stmt;
3771 edge e;
3772 edge_iterator ei;
3774 if (ENTRY_BLOCK_PTR->stmt_list)
3776 error ("ENTRY_BLOCK has a statement list associated with it\n");
3777 err = 1;
3780 if (EXIT_BLOCK_PTR->stmt_list)
3782 error ("EXIT_BLOCK has a statement list associated with it\n");
3783 err = 1;
3786 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3787 if (e->flags & EDGE_FALLTHRU)
3789 error ("Fallthru to exit from bb %d\n", e->src->index);
3790 err = 1;
3793 FOR_EACH_BB (bb)
3795 bool found_ctrl_stmt = false;
3797 stmt = NULL_TREE;
3799 /* Skip labels on the start of basic block. */
3800 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3802 tree prev_stmt = stmt;
3804 stmt = bsi_stmt (bsi);
3806 if (TREE_CODE (stmt) != LABEL_EXPR)
3807 break;
3809 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3811 error ("Nonlocal label %s is not first "
3812 "in a sequence of labels in bb %d",
3813 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3814 bb->index);
3815 err = 1;
3818 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3820 error ("Label %s to block does not match in bb %d\n",
3821 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3822 bb->index);
3823 err = 1;
3826 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3827 != current_function_decl)
3829 error ("Label %s has incorrect context in bb %d\n",
3830 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3831 bb->index);
3832 err = 1;
3836 /* Verify that body of basic block BB is free of control flow. */
3837 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3839 tree stmt = bsi_stmt (bsi);
3841 if (found_ctrl_stmt)
3843 error ("Control flow in the middle of basic block %d\n",
3844 bb->index);
3845 err = 1;
3848 if (stmt_ends_bb_p (stmt))
3849 found_ctrl_stmt = true;
3851 if (TREE_CODE (stmt) == LABEL_EXPR)
3853 error ("Label %s in the middle of basic block %d\n",
3854 IDENTIFIER_POINTER (DECL_NAME (stmt)),
3855 bb->index);
3856 err = 1;
3859 bsi = bsi_last (bb);
3860 if (bsi_end_p (bsi))
3861 continue;
3863 stmt = bsi_stmt (bsi);
3865 if (is_ctrl_stmt (stmt))
3867 FOR_EACH_EDGE (e, ei, bb->succs)
3868 if (e->flags & EDGE_FALLTHRU)
3870 error ("Fallthru edge after a control statement in bb %d \n",
3871 bb->index);
3872 err = 1;
3876 switch (TREE_CODE (stmt))
3878 case COND_EXPR:
3880 edge true_edge;
3881 edge false_edge;
3882 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3883 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3885 error ("Structured COND_EXPR at the end of bb %d\n", bb->index);
3886 err = 1;
3889 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3891 if (!true_edge || !false_edge
3892 || !(true_edge->flags & EDGE_TRUE_VALUE)
3893 || !(false_edge->flags & EDGE_FALSE_VALUE)
3894 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3895 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3896 || EDGE_COUNT (bb->succs) >= 3)
3898 error ("Wrong outgoing edge flags at end of bb %d\n",
3899 bb->index);
3900 err = 1;
3903 if (!has_label_p (true_edge->dest,
3904 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3906 error ("%<then%> label does not match edge at end of bb %d\n",
3907 bb->index);
3908 err = 1;
3911 if (!has_label_p (false_edge->dest,
3912 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3914 error ("%<else%> label does not match edge at end of bb %d\n",
3915 bb->index);
3916 err = 1;
3919 break;
3921 case GOTO_EXPR:
3922 if (simple_goto_p (stmt))
3924 error ("Explicit goto at end of bb %d\n", bb->index);
3925 err = 1;
3927 else
3929 /* FIXME. We should double check that the labels in the
3930 destination blocks have their address taken. */
3931 FOR_EACH_EDGE (e, ei, bb->succs)
3932 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3933 | EDGE_FALSE_VALUE))
3934 || !(e->flags & EDGE_ABNORMAL))
3936 error ("Wrong outgoing edge flags at end of bb %d\n",
3937 bb->index);
3938 err = 1;
3941 break;
3943 case RETURN_EXPR:
3944 if (!single_succ_p (bb)
3945 || (single_succ_edge (bb)->flags
3946 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3947 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3949 error ("Wrong outgoing edge flags at end of bb %d\n", bb->index);
3950 err = 1;
3952 if (single_succ (bb) != EXIT_BLOCK_PTR)
3954 error ("Return edge does not point to exit in bb %d\n",
3955 bb->index);
3956 err = 1;
3958 break;
3960 case SWITCH_EXPR:
3962 tree prev;
3963 edge e;
3964 size_t i, n;
3965 tree vec;
3967 vec = SWITCH_LABELS (stmt);
3968 n = TREE_VEC_LENGTH (vec);
3970 /* Mark all the destination basic blocks. */
3971 for (i = 0; i < n; ++i)
3973 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3974 basic_block label_bb = label_to_block (lab);
3976 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3977 label_bb->aux = (void *)1;
3980 /* Verify that the case labels are sorted. */
3981 prev = TREE_VEC_ELT (vec, 0);
3982 for (i = 1; i < n - 1; ++i)
3984 tree c = TREE_VEC_ELT (vec, i);
3985 if (! CASE_LOW (c))
3987 error ("Found default case not at end of case vector");
3988 err = 1;
3989 continue;
3991 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3993 error ("Case labels not sorted:\n ");
3994 print_generic_expr (stderr, prev, 0);
3995 fprintf (stderr," is greater than ");
3996 print_generic_expr (stderr, c, 0);
3997 fprintf (stderr," but comes before it.\n");
3998 err = 1;
4000 prev = c;
4002 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
4004 error ("No default case found at end of case vector");
4005 err = 1;
4008 FOR_EACH_EDGE (e, ei, bb->succs)
4010 if (!e->dest->aux)
4012 error ("Extra outgoing edge %d->%d\n",
4013 bb->index, e->dest->index);
4014 err = 1;
4016 e->dest->aux = (void *)2;
4017 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4018 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4020 error ("Wrong outgoing edge flags at end of bb %d\n",
4021 bb->index);
4022 err = 1;
4026 /* Check that we have all of them. */
4027 for (i = 0; i < n; ++i)
4029 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4030 basic_block label_bb = label_to_block (lab);
4032 if (label_bb->aux != (void *)2)
4034 error ("Missing edge %i->%i",
4035 bb->index, label_bb->index);
4036 err = 1;
4040 FOR_EACH_EDGE (e, ei, bb->succs)
4041 e->dest->aux = (void *)0;
4044 default: ;
4048 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
4049 verify_dominators (CDI_DOMINATORS);
4051 return err;
4055 /* Updates phi nodes after creating a forwarder block joined
4056 by edge FALLTHRU. */
4058 static void
4059 tree_make_forwarder_block (edge fallthru)
4061 edge e;
4062 edge_iterator ei;
4063 basic_block dummy, bb;
4064 tree phi, new_phi, var;
4066 dummy = fallthru->src;
4067 bb = fallthru->dest;
4069 if (single_pred_p (bb))
4070 return;
4072 /* If we redirected a branch we must create new phi nodes at the
4073 start of BB. */
4074 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
4076 var = PHI_RESULT (phi);
4077 new_phi = create_phi_node (var, bb);
4078 SSA_NAME_DEF_STMT (var) = new_phi;
4079 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4080 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4083 /* Ensure that the PHI node chain is in the same order. */
4084 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4086 /* Add the arguments we have stored on edges. */
4087 FOR_EACH_EDGE (e, ei, bb->preds)
4089 if (e == fallthru)
4090 continue;
4092 flush_pending_stmts (e);
4097 /* Return true if basic block BB does nothing except pass control
4098 flow to another block and that we can safely insert a label at
4099 the start of the successor block.
4101 As a precondition, we require that BB be not equal to
4102 ENTRY_BLOCK_PTR. */
4104 static bool
4105 tree_forwarder_block_p (basic_block bb, bool phi_wanted)
4107 block_stmt_iterator bsi;
4109 /* BB must have a single outgoing edge. */
4110 if (single_succ_p (bb) != 1
4111 /* If PHI_WANTED is false, BB must not have any PHI nodes.
4112 Otherwise, BB must have PHI nodes. */
4113 || (phi_nodes (bb) != NULL_TREE) != phi_wanted
4114 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
4115 || single_succ (bb) == EXIT_BLOCK_PTR
4116 /* Nor should this be an infinite loop. */
4117 || single_succ (bb) == bb
4118 /* BB may not have an abnormal outgoing edge. */
4119 || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
4120 return false;
4122 #if ENABLE_CHECKING
4123 gcc_assert (bb != ENTRY_BLOCK_PTR);
4124 #endif
4126 /* Now walk through the statements backward. We can ignore labels,
4127 anything else means this is not a forwarder block. */
4128 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
4130 tree stmt = bsi_stmt (bsi);
4132 switch (TREE_CODE (stmt))
4134 case LABEL_EXPR:
4135 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
4136 return false;
4137 break;
4139 default:
4140 return false;
4144 if (find_edge (ENTRY_BLOCK_PTR, bb))
4145 return false;
4147 if (current_loops)
4149 basic_block dest;
4150 /* Protect loop latches, headers and preheaders. */
4151 if (bb->loop_father->header == bb)
4152 return false;
4153 dest = EDGE_SUCC (bb, 0)->dest;
4155 if (dest->loop_father->header == dest)
4156 return false;
4159 return true;
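/* In dump form a forwarder block is nothing but an (optional) ordinary
   label whose single successor edge falls through elsewhere, e.g. (a
   sketch, indices are illustrative):

     # BLOCK 7
     <L7>:;
     # SUCC: 12 (fallthru)

   When PHI_WANTED is true the block must additionally carry PHI nodes,
   typically ones whose results feed the PHI nodes of that successor, as
   exploited by merge_phi_nodes below.  */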
4162 /* Return true if BB has at least one abnormal incoming edge. */
4164 static inline bool
4165 has_abnormal_incoming_edge_p (basic_block bb)
4167 edge e;
4168 edge_iterator ei;
4170 FOR_EACH_EDGE (e, ei, bb->preds)
4171 if (e->flags & EDGE_ABNORMAL)
4172 return true;
4174 return false;
4177 /* Removes forwarder block BB. Returns false if this failed. If a new
4178 forwarder block is created due to redirection of edges, it is
4179 stored to worklist. */
4181 static bool
4182 remove_forwarder_block (basic_block bb, basic_block **worklist)
4184 edge succ = single_succ_edge (bb), e, s;
4185 basic_block dest = succ->dest;
4186 tree label;
4187 tree phi;
4188 edge_iterator ei;
4189 block_stmt_iterator bsi, bsi_to;
4190 bool seen_abnormal_edge = false;
4192 /* We check for infinite loops already in tree_forwarder_block_p.
4193 However it may happen that the infinite loop is created
4194 afterwards due to removal of forwarders. */
4195 if (dest == bb)
4196 return false;
4198 /* If the destination block consists of a nonlocal label, do not merge
4199 it. */
4200 label = first_stmt (dest);
4201 if (label
4202 && TREE_CODE (label) == LABEL_EXPR
4203 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
4204 return false;
4206 /* If there is an abnormal edge to basic block BB, but not into
4207 dest, problems might occur when the PHI node is removed at
4208 out-of-SSA time due to overlapping live ranges of registers.
4210 If there is an abnormal edge in DEST, the problems would occur
4211 anyway since cleanup_dead_labels would then merge the labels for
4212 two different eh regions, and rest of exception handling code
4213 does not like it.
4215 So if there is an abnormal edge to BB, proceed only if there is
4216 no abnormal edge to DEST and there are no phi nodes in DEST. */
4217 if (has_abnormal_incoming_edge_p (bb))
4219 seen_abnormal_edge = true;
4221 if (has_abnormal_incoming_edge_p (dest)
4222 || phi_nodes (dest) != NULL_TREE)
4223 return false;
4226 /* If there are phi nodes in DEST, and some of the blocks that are
4227 predecessors of BB are also predecessors of DEST, check that the
4228 phi node arguments match. */
4229 if (phi_nodes (dest))
4231 FOR_EACH_EDGE (e, ei, bb->preds)
4233 s = find_edge (e->src, dest);
4234 if (!s)
4235 continue;
4237 if (!phi_alternatives_equal (dest, succ, s))
4238 return false;
4242 /* Redirect the edges. */
4243 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4245 if (e->flags & EDGE_ABNORMAL)
4247 /* If there is an abnormal edge, redirect it anyway, and
4248 move the labels to the new block to make it legal. */
4249 s = redirect_edge_succ_nodup (e, dest);
4251 else
4252 s = redirect_edge_and_branch (e, dest);
4254 if (s == e)
4256 /* Create arguments for the phi nodes, since the edge was not
4257 here before. */
4258 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4259 add_phi_arg (phi, PHI_ARG_DEF (phi, succ->dest_idx), s);
4261 else
4263 /* The source basic block might become a forwarder. We know
4264 that it was not a forwarder before, since it used to have
4265 at least two outgoing edges, so we may just add it to
4266 worklist. */
4267 if (tree_forwarder_block_p (s->src, false))
4268 *(*worklist)++ = s->src;
4272 if (seen_abnormal_edge)
4274 /* Move the labels to the new block, so that the redirection of
4275 the abnormal edges works. */
4277 bsi_to = bsi_start (dest);
4278 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
4280 label = bsi_stmt (bsi);
4281 gcc_assert (TREE_CODE (label) == LABEL_EXPR);
4282 bsi_remove (&bsi);
4283 bsi_insert_before (&bsi_to, label, BSI_CONTINUE_LINKING);
4287 /* Update the dominators. */
4288 if (dom_info_available_p (CDI_DOMINATORS))
4290 basic_block dom, dombb, domdest;
4292 dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
4293 domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
4294 if (domdest == bb)
4296 /* Shortcut to avoid calling (relatively expensive)
4297 nearest_common_dominator unless necessary. */
4298 dom = dombb;
4300 else
4301 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
4303 set_immediate_dominator (CDI_DOMINATORS, dest, dom);
4306 /* And kill the forwarder block. */
4307 delete_basic_block (bb);
4309 return true;
4312 /* Removes forwarder blocks. */
4314 static bool
4315 cleanup_forwarder_blocks (void)
4317 basic_block bb;
4318 bool changed = false;
4319 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
4320 basic_block *current = worklist;
4322 FOR_EACH_BB (bb)
4324 if (tree_forwarder_block_p (bb, false))
4325 *current++ = bb;
4328 while (current != worklist)
4330 bb = *--current;
4331 changed |= remove_forwarder_block (bb, &current);
4334 free (worklist);
4335 return changed;
4338 /* Merge the PHI nodes at BB into those at BB's sole successor. */
4340 static void
4341 remove_forwarder_block_with_phi (basic_block bb)
4343 edge succ = single_succ_edge (bb);
4344 basic_block dest = succ->dest;
4345 tree label;
4346 basic_block dombb, domdest, dom;
4348 /* We check for infinite loops already in tree_forwarder_block_p.
4349 However it may happen that the infinite loop is created
4350 afterwards due to removal of forwarders. */
4351 if (dest == bb)
4352 return;
4354 /* If the destination block consists of a nonlocal label, do not
4355 merge it. */
4356 label = first_stmt (dest);
4357 if (label
4358 && TREE_CODE (label) == LABEL_EXPR
4359 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
4360 return;
4362 /* Redirect each incoming edge to BB to DEST. */
4363 while (EDGE_COUNT (bb->preds) > 0)
4365 edge e = EDGE_PRED (bb, 0), s;
4366 tree phi;
4368 s = find_edge (e->src, dest);
4369 if (s)
4371 /* We already have an edge S from E->src to DEST. If S and
4372 E->dest's sole successor edge have the same PHI arguments
4373 at DEST, redirect E to DEST. */
4374 if (phi_alternatives_equal (dest, s, succ))
4376 e = redirect_edge_and_branch (e, dest);
4377 PENDING_STMT (e) = NULL_TREE;
4378 continue;
4381 /* PHI arguments are different. Create a forwarder block by
4382 splitting E so that we can merge PHI arguments on E to
4383 DEST. */
4384 e = single_succ_edge (split_edge (e));
4387 s = redirect_edge_and_branch (e, dest);
4389 /* redirect_edge_and_branch must not create a new edge. */
4390 gcc_assert (s == e);
4392 /* Add to the PHI nodes at DEST each PHI argument removed at the
4393 destination of E. */
4394 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4396 tree def = PHI_ARG_DEF (phi, succ->dest_idx);
4398 if (TREE_CODE (def) == SSA_NAME)
4400 tree var;
4402 /* If DEF is one of the results of PHI nodes removed during
4403 redirection, replace it with the PHI argument that used
4404 to be on E. */
4405 for (var = PENDING_STMT (e); var; var = TREE_CHAIN (var))
4407 tree old_arg = TREE_PURPOSE (var);
4408 tree new_arg = TREE_VALUE (var);
4410 if (def == old_arg)
4412 def = new_arg;
4413 break;
4418 add_phi_arg (phi, def, s);
4421 PENDING_STMT (e) = NULL;
4424 /* Update the dominators. */
4425 dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
4426 domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
4427 if (domdest == bb)
4429 /* Shortcut to avoid calling (relatively expensive)
4430 nearest_common_dominator unless necessary. */
4431 dom = dombb;
4433 else
4434 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
4436 set_immediate_dominator (CDI_DOMINATORS, dest, dom);
4438 /* Remove BB since all of BB's incoming edges have been redirected
4439 to DEST. */
4440 delete_basic_block (bb);
4443 /* This pass merges PHI nodes if one feeds into another. For example,
4444 suppose we have the following:
4446 goto <bb 9> (<L9>);
4448 <L8>:;
4449 tem_17 = foo ();
4451 # tem_6 = PHI <tem_17(8), tem_23(7)>;
4452 <L9>:;
4454 # tem_3 = PHI <tem_6(9), tem_2(5)>;
4455 <L10>:;
4457 Then we merge the first PHI node into the second one like so:
4459 goto <bb 9> (<L10>);
4461 <L8>:;
4462 tem_17 = foo ();
4464 # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
4465 <L10>:;
4468 static void
4469 merge_phi_nodes (void)
4471 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
4472 basic_block *current = worklist;
4473 basic_block bb;
4475 calculate_dominance_info (CDI_DOMINATORS);
4477 /* Find all PHI nodes that we may be able to merge. */
4478 FOR_EACH_BB (bb)
4480 basic_block dest;
4482 /* Look for a forwarder block with PHI nodes. */
4483 if (!tree_forwarder_block_p (bb, true))
4484 continue;
4486 dest = single_succ (bb);
4488 /* We have to feed into another basic block with PHI
4489 nodes. */
4490 if (!phi_nodes (dest)
4491 /* We don't want to deal with a basic block with
4492 abnormal incoming edges. */
4493 || has_abnormal_incoming_edge_p (bb))
4494 continue;
4496 if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
4498 /* If BB does not dominate DEST, then the PHI nodes at
4499 DEST must be the only users of the results of the PHI
4500 nodes at BB. */
4501 *current++ = bb;
4505 /* Now let's drain WORKLIST. */
4506 while (current != worklist)
4508 bb = *--current;
4509 remove_forwarder_block_with_phi (bb);
4512 free (worklist);
4515 static bool
4516 gate_merge_phi (void)
4518 return 1;
4521 struct tree_opt_pass pass_merge_phi = {
4522 "mergephi", /* name */
4523 gate_merge_phi, /* gate */
4524 merge_phi_nodes, /* execute */
4525 NULL, /* sub */
4526 NULL, /* next */
4527 0, /* static_pass_number */
4528 TV_TREE_MERGE_PHI, /* tv_id */
4529 PROP_cfg | PROP_ssa, /* properties_required */
4530 0, /* properties_provided */
4531 0, /* properties_destroyed */
4532 0, /* todo_flags_start */
4533 TODO_dump_func | TODO_ggc_collect /* todo_flags_finish */
4534 | TODO_verify_ssa,
4535 0 /* letter */
4538 /* Return a non-special label at the head of basic block BB.
4539 Create one if it doesn't exist. */
4541 tree
4542 tree_block_label (basic_block bb)
4544 block_stmt_iterator i, s = bsi_start (bb);
4545 bool first = true;
4546 tree label, stmt;
4548 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4550 stmt = bsi_stmt (i);
4551 if (TREE_CODE (stmt) != LABEL_EXPR)
4552 break;
4553 label = LABEL_EXPR_LABEL (stmt);
4554 if (!DECL_NONLOCAL (label))
4556 if (!first)
4557 bsi_move_before (&i, &s);
4558 return label;
4562 label = create_artificial_label ();
4563 stmt = build1 (LABEL_EXPR, void_type_node, label);
4564 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4565 return label;
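/* A minimal usage sketch (not part of this file; BB and BSI are
   placeholders): a transformation that needs an explicit jump to BB
   could combine tree_block_label with a GOTO_EXPR, e.g.

     tree label = tree_block_label (bb);
     tree jump = build1 (GOTO_EXPR, void_type_node, label);
     bsi_insert_before (&bsi, jump, BSI_SAME_STMT);

   where BSI is a block_stmt_iterator positioned at the statement the
   jump should precede.  */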
4569 /* Attempt to perform edge redirection by replacing a possibly complex
4570 jump instruction by a goto or by removing the jump completely.
4571 This can apply only if all edges now point to the same block. The
4572 parameters and return values are equivalent to
4573 redirect_edge_and_branch. */
4575 static edge
4576 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4578 basic_block src = e->src;
4579 block_stmt_iterator b;
4580 tree stmt;
4582 /* We can replace or remove a complex jump only when we have exactly
4583 two outgoing edges. */
4584 if (EDGE_COUNT (src->succs) != 2
4585 /* Verify that all targets will be TARGET. Specifically, the
4586 edge that is not E must also go to TARGET. */
4587 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4588 return NULL;
4590 b = bsi_last (src);
4591 if (bsi_end_p (b))
4592 return NULL;
4593 stmt = bsi_stmt (b);
4595 if (TREE_CODE (stmt) == COND_EXPR
4596 || TREE_CODE (stmt) == SWITCH_EXPR)
4598 bsi_remove (&b);
4599 e = ssa_redirect_edge (e, target);
4600 e->flags = EDGE_FALLTHRU;
4601 return e;
4604 return NULL;
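/* Note on the index expression above: EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)
   selects the successor edge of SRC other than E.  The comparison
   evaluates to 0 or 1, so it is equivalent to the more verbose

     edge other = (EDGE_SUCC (src, 0) == e
                   ? EDGE_SUCC (src, 1) : EDGE_SUCC (src, 0));

   and relies on SRC having exactly two successors, as checked above.  */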
4608 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4609 edge representing the redirected branch. */
4611 static edge
4612 tree_redirect_edge_and_branch (edge e, basic_block dest)
4614 basic_block bb = e->src;
4615 block_stmt_iterator bsi;
4616 edge ret;
4617 tree label, stmt;
4619 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4620 return NULL;
4622 if (e->src != ENTRY_BLOCK_PTR
4623 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4624 return ret;
4626 if (e->dest == dest)
4627 return NULL;
4629 label = tree_block_label (dest);
4631 bsi = bsi_last (bb);
4632 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4634 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4636 case COND_EXPR:
4637 stmt = (e->flags & EDGE_TRUE_VALUE
4638 ? COND_EXPR_THEN (stmt)
4639 : COND_EXPR_ELSE (stmt));
4640 GOTO_DESTINATION (stmt) = label;
4641 break;
4643 case GOTO_EXPR:
4644 /* No non-abnormal edges should lead from a non-simple goto, and
4645 simple ones should be represented implicitly. */
4646 gcc_unreachable ();
4648 case SWITCH_EXPR:
4650 tree cases = get_cases_for_edge (e, stmt);
4652 /* If we have a list of cases associated with E, then use it
4653 as it's a lot faster than walking the entire case vector. */
4654 if (cases)
4656 edge e2 = find_edge (e->src, dest);
4657 tree last, first;
4659 first = cases;
4660 while (cases)
4662 last = cases;
4663 CASE_LABEL (cases) = label;
4664 cases = TREE_CHAIN (cases);
4667 /* If there was already an edge in the CFG, then we need
4668 to move all the cases associated with E to E2. */
4669 if (e2)
4671 tree cases2 = get_cases_for_edge (e2, stmt);
4673 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4674 TREE_CHAIN (cases2) = first;
4677 else
4679 tree vec = SWITCH_LABELS (stmt);
4680 size_t i, n = TREE_VEC_LENGTH (vec);
4682 for (i = 0; i < n; i++)
4684 tree elt = TREE_VEC_ELT (vec, i);
4686 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4687 CASE_LABEL (elt) = label;
4691 break;
4694 case RETURN_EXPR:
4695 bsi_remove (&bsi);
4696 e->flags |= EDGE_FALLTHRU;
4697 break;
4699 default:
4700 /* Otherwise it must be a fallthru edge, and we don't need to
4701 do anything besides redirecting it. */
4702 gcc_assert (e->flags & EDGE_FALLTHRU);
4703 break;
4706 /* Update/insert PHI nodes as necessary. */
4708 /* Now update the edges in the CFG. */
4709 e = ssa_redirect_edge (e, dest);
4711 return e;
4715 /* Simple wrapper, as we can always redirect fallthru edges. */
4717 static basic_block
4718 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4720 e = tree_redirect_edge_and_branch (e, dest);
4721 gcc_assert (e);
4723 return NULL;
4727 /* Splits basic block BB after statement STMT (but at least after the
4728 labels). If STMT is NULL, BB is split just after the labels. */
4730 static basic_block
4731 tree_split_block (basic_block bb, void *stmt)
4733 block_stmt_iterator bsi, bsi_tgt;
4734 tree act;
4735 basic_block new_bb;
4736 edge e;
4737 edge_iterator ei;
4739 new_bb = create_empty_bb (bb);
4741 /* Redirect the outgoing edges. */
4742 new_bb->succs = bb->succs;
4743 bb->succs = NULL;
4744 FOR_EACH_EDGE (e, ei, new_bb->succs)
4745 e->src = new_bb;
4747 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4748 stmt = NULL;
4750 /* Move everything from BSI to the new basic block. */
4751 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4753 act = bsi_stmt (bsi);
4754 if (TREE_CODE (act) == LABEL_EXPR)
4755 continue;
4757 if (!stmt)
4758 break;
4760 if (stmt == act)
4762 bsi_next (&bsi);
4763 break;
4767 bsi_tgt = bsi_start (new_bb);
4768 while (!bsi_end_p (bsi))
4770 act = bsi_stmt (bsi);
4771 bsi_remove (&bsi);
4772 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4775 return new_bb;
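/* Illustrative sketch of how this hook is reached (names below are
   placeholders): callers use the generic wrapper rather than calling
   tree_split_block directly, e.g.

     edge e = split_block (bb, stmt);
     if (e)
       ...   (e->dest is the new block holding everything after STMT)

   as tree_flow_call_edges_add does further down in this file.  */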
4779 /* Moves basic block BB after block AFTER. */
4781 static bool
4782 tree_move_block_after (basic_block bb, basic_block after)
4784 if (bb->prev_bb == after)
4785 return true;
4787 unlink_block (bb);
4788 link_block (bb, after);
4790 return true;
4794 /* Return true if basic block BB can be duplicated. */
4796 static bool
4797 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4799 return true;
4802 /* Create a duplicate of the basic block BB. NOTE: This does not
4803 preserve SSA form. */
4805 static basic_block
4806 tree_duplicate_bb (basic_block bb)
4808 basic_block new_bb;
4809 block_stmt_iterator bsi, bsi_tgt;
4810 tree phi, val;
4811 ssa_op_iter op_iter;
4813 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4815 /* First copy the phi nodes. We do not copy phi node arguments here,
4816 since the edges are not ready yet. Keep the chain of phi nodes in
4817 the same order, so that we can add them later. */
4818 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4820 mark_for_rewrite (PHI_RESULT (phi));
4821 create_phi_node (PHI_RESULT (phi), new_bb);
4823 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4825 bsi_tgt = bsi_start (new_bb);
4826 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4828 tree stmt = bsi_stmt (bsi);
4829 tree copy;
4831 if (TREE_CODE (stmt) == LABEL_EXPR)
4832 continue;
4834 /* Record the definitions. */
4835 get_stmt_operands (stmt);
4837 FOR_EACH_SSA_TREE_OPERAND (val, stmt, op_iter, SSA_OP_ALL_DEFS)
4838 mark_for_rewrite (val);
4840 copy = unshare_expr (stmt);
4842 /* Copy also the virtual operands. */
4843 get_stmt_ann (copy);
4844 copy_virtual_operands (copy, stmt);
4846 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4849 return new_bb;
4852 /* Basic block BB_COPY was created by code duplication. Add phi node
4853 arguments for edges going out of BB_COPY. The blocks that were
4854 duplicated have rbi->duplicated set to one. */
4856 void
4857 add_phi_args_after_copy_bb (basic_block bb_copy)
4859 basic_block bb, dest;
4860 edge e, e_copy;
4861 edge_iterator ei;
4862 tree phi, phi_copy, phi_next, def;
4864 bb = bb_copy->rbi->original;
4866 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4868 if (!phi_nodes (e_copy->dest))
4869 continue;
4871 if (e_copy->dest->rbi->duplicated)
4872 dest = e_copy->dest->rbi->original;
4873 else
4874 dest = e_copy->dest;
4876 e = find_edge (bb, dest);
4877 if (!e)
4879 /* During loop unrolling the target of the latch edge is copied.
4880 In this case we are not looking for the edge to DEST, but for
4881 the edge to the duplicated block whose original was DEST. */
4882 FOR_EACH_EDGE (e, ei, bb->succs)
4883 if (e->dest->rbi->duplicated
4884 && e->dest->rbi->original == dest)
4885 break;
4887 gcc_assert (e != NULL);
4890 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4891 phi;
4892 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4894 phi_next = PHI_CHAIN (phi);
4896 gcc_assert (PHI_RESULT (phi) == PHI_RESULT (phi_copy));
4897 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4898 add_phi_arg (phi_copy, def, e_copy);
4903 /* Blocks in REGION_COPY array of length N_REGION were created by
4904 duplication of basic blocks. Add phi node arguments for edges
4905 going from these blocks. */
4907 void
4908 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4910 unsigned i;
4912 for (i = 0; i < n_region; i++)
4913 region_copy[i]->rbi->duplicated = 1;
4915 for (i = 0; i < n_region; i++)
4916 add_phi_args_after_copy_bb (region_copy[i]);
4918 for (i = 0; i < n_region; i++)
4919 region_copy[i]->rbi->duplicated = 0;
4922 /* Maps the old ssa name FROM_NAME to TO_NAME. */
4924 struct ssa_name_map_entry
4926 tree from_name;
4927 tree to_name;
4930 /* Hash function for ssa_name_map_entry. */
4932 static hashval_t
4933 ssa_name_map_entry_hash (const void *entry)
4935 const struct ssa_name_map_entry *en = entry;
4936 return SSA_NAME_VERSION (en->from_name);
4939 /* Equality function for ssa_name_map_entry. */
4941 static int
4942 ssa_name_map_entry_eq (const void *in_table, const void *ssa_name)
4944 const struct ssa_name_map_entry *en = in_table;
4946 return en->from_name == ssa_name;
4949 /* Allocate duplicates of the ssa names whose versions are set in the
4950 bitmap DEFINITIONS and store the mapping in MAP. */
4952 void
4953 allocate_ssa_names (bitmap definitions, htab_t *map)
4955 tree name;
4956 struct ssa_name_map_entry *entry;
4957 PTR *slot;
4958 unsigned ver;
4959 bitmap_iterator bi;
4961 if (!*map)
4962 *map = htab_create (10, ssa_name_map_entry_hash,
4963 ssa_name_map_entry_eq, free);
4964 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
4966 name = ssa_name (ver);
4967 slot = htab_find_slot_with_hash (*map, name, SSA_NAME_VERSION (name),
4968 INSERT);
4969 if (*slot)
4970 entry = *slot;
4971 else
4973 entry = xmalloc (sizeof (struct ssa_name_map_entry));
4974 entry->from_name = name;
4975 *slot = entry;
4977 entry->to_name = duplicate_ssa_name (name, SSA_NAME_DEF_STMT (name));
4981 /* Rewrite the definition DEF in statement STMT to a new ssa name as
4982 specified by the mapping MAP. */
4984 static void
4985 rewrite_to_new_ssa_names_def (def_operand_p def, tree stmt, htab_t map)
4987 tree name = DEF_FROM_PTR (def);
4988 struct ssa_name_map_entry *entry;
4990 gcc_assert (TREE_CODE (name) == SSA_NAME);
4992 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4993 if (!entry)
4994 return;
4996 SET_DEF (def, entry->to_name);
4997 SSA_NAME_DEF_STMT (entry->to_name) = stmt;
5000 /* Rewrite the USE to a new ssa name as specified by the mapping MAP. */
5002 static void
5003 rewrite_to_new_ssa_names_use (use_operand_p use, htab_t map)
5005 tree name = USE_FROM_PTR (use);
5006 struct ssa_name_map_entry *entry;
5008 if (TREE_CODE (name) != SSA_NAME)
5009 return;
5011 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
5012 if (!entry)
5013 return;
5015 SET_USE (use, entry->to_name);
5018 /* Rewrite the ssa names in basic block BB to new ones as specified by the
5019 mapping MAP. */
5021 void
5022 rewrite_to_new_ssa_names_bb (basic_block bb, htab_t map)
5024 unsigned i;
5025 edge e;
5026 edge_iterator ei;
5027 tree phi, stmt;
5028 block_stmt_iterator bsi;
5029 use_optype uses;
5030 vuse_optype vuses;
5031 def_optype defs;
5032 v_may_def_optype v_may_defs;
5033 v_must_def_optype v_must_defs;
5034 stmt_ann_t ann;
5036 FOR_EACH_EDGE (e, ei, bb->preds)
5037 if (e->flags & EDGE_ABNORMAL)
5038 break;
5040 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5042 rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi), phi, map);
5043 if (e)
5044 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
5047 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5049 stmt = bsi_stmt (bsi);
5050 get_stmt_operands (stmt);
5051 ann = stmt_ann (stmt);
5053 uses = USE_OPS (ann);
5054 for (i = 0; i < NUM_USES (uses); i++)
5055 rewrite_to_new_ssa_names_use (USE_OP_PTR (uses, i), map);
5057 defs = DEF_OPS (ann);
5058 for (i = 0; i < NUM_DEFS (defs); i++)
5059 rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs, i), stmt, map);
5061 vuses = VUSE_OPS (ann);
5062 for (i = 0; i < NUM_VUSES (vuses); i++)
5063 rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses, i), map);
5065 v_may_defs = V_MAY_DEF_OPS (ann);
5066 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
5068 rewrite_to_new_ssa_names_use
5069 (V_MAY_DEF_OP_PTR (v_may_defs, i), map);
5070 rewrite_to_new_ssa_names_def
5071 (V_MAY_DEF_RESULT_PTR (v_may_defs, i), stmt, map);
5074 v_must_defs = V_MUST_DEF_OPS (ann);
5075 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
5077 rewrite_to_new_ssa_names_def
5078 (V_MUST_DEF_RESULT_PTR (v_must_defs, i), stmt, map);
5079 rewrite_to_new_ssa_names_use
5080 (V_MUST_DEF_KILL_PTR (v_must_defs, i), map);
5084 FOR_EACH_EDGE (e, ei, bb->succs)
5085 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
5087 rewrite_to_new_ssa_names_use
5088 (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), map);
5090 if (e->flags & EDGE_ABNORMAL)
5092 tree op = PHI_ARG_DEF_FROM_EDGE (phi, e);
5093 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op) = 1;
5098 /* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
5099 by the mapping MAP. */
5101 void
5102 rewrite_to_new_ssa_names (basic_block *region, unsigned n_region, htab_t map)
5104 unsigned r;
5106 for (r = 0; r < n_region; r++)
5107 rewrite_to_new_ssa_names_bb (region[r], map);
5110 /* Duplicates a REGION (a set of N_REGION basic blocks) with just a single
5111 important exit edge EXIT. By important we mean that no SSA name defined
5112 inside the region is live over the other exit edges of the region. All
5113 entry edges to the region must go to ENTRY->dest. The edge ENTRY is
5114 redirected to the duplicate of the region. SSA form, dominance and loop
5115 information are updated. The new basic blocks are stored into REGION_COPY
5116 in the same order as they appear in REGION, provided that REGION_COPY is
5117 not NULL. The function returns false if it is unable to copy the region,
5118 true otherwise. */
5120 bool
5121 tree_duplicate_sese_region (edge entry, edge exit,
5122 basic_block *region, unsigned n_region,
5123 basic_block *region_copy)
5125 unsigned i, n_doms, ver;
5126 bool free_region_copy = false, copying_header = false;
5127 struct loop *loop = entry->dest->loop_father;
5128 edge exit_copy;
5129 bitmap definitions;
5130 tree phi;
5131 basic_block *doms;
5132 htab_t ssa_name_map = NULL;
5133 edge redirected;
5134 bitmap_iterator bi;
5136 if (!can_copy_bbs_p (region, n_region))
5137 return false;
5139 /* Some sanity checking. Note that we do not check for all possible
5140 misuses of the function. That is, if you ask to copy something weird,
5141 it will work, but the state of the data structures probably will not
5142 be correct. */
5144 for (i = 0; i < n_region; i++)
5146 /* We do not handle subloops, i.e. all the blocks must belong to the
5147 same loop. */
5148 if (region[i]->loop_father != loop)
5149 return false;
5151 if (region[i] != entry->dest
5152 && region[i] == loop->header)
5153 return false;
5156 loop->copy = loop;
5158 /* In case the function is used for loop header copying (which is the primary
5159 use), ensure that EXIT and its copy will be new latch and entry edges. */
5160 if (loop->header == entry->dest)
5162 copying_header = true;
5163 loop->copy = loop->outer;
5165 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5166 return false;
5168 for (i = 0; i < n_region; i++)
5169 if (region[i] != exit->src
5170 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5171 return false;
5174 if (!region_copy)
5176 region_copy = xmalloc (sizeof (basic_block) * n_region);
5177 free_region_copy = true;
5180 gcc_assert (!any_marked_for_rewrite_p ());
5182 /* Record blocks outside the region that are dominated by something
5183 inside. */
5184 doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
5185 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
5187 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
5188 definitions = marked_ssa_names ();
5190 if (copying_header)
5192 loop->header = exit->dest;
5193 loop->latch = exit->src;
5196 /* Redirect the entry and add the phi node arguments. */
5197 redirected = redirect_edge_and_branch (entry, entry->dest->rbi->copy);
5198 gcc_assert (redirected != NULL);
5199 flush_pending_stmts (entry);
5201 /* Concerning updating of dominators: We must recount dominators
5202 for entry block and its copy. Anything that is outside of the region, but
5203 was dominated by something inside needs recounting as well. */
5204 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5205 doms[n_doms++] = entry->dest->rbi->original;
5206 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
5207 free (doms);
5209 /* Add the other phi node arguments. */
5210 add_phi_args_after_copy (region_copy, n_region);
5212 /* Add phi nodes for definitions at exit. TODO -- once we have immediate
5213 uses, it should be possible to emit phi nodes just for definitions that
5214 are used outside region. */
5215 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
5217 tree name = ssa_name (ver);
5219 phi = create_phi_node (name, exit->dest);
5220 add_phi_arg (phi, name, exit);
5221 add_phi_arg (phi, name, exit_copy);
5223 SSA_NAME_DEF_STMT (name) = phi;
5226 /* And create new definitions inside region and its copy. TODO -- once we
5227 have immediate uses, it might be better to leave definitions in region
5228 unchanged, create new ssa names for phi nodes on exit, and rewrite
5229 the uses, to avoid changing the copied region. */
5230 allocate_ssa_names (definitions, &ssa_name_map);
5231 rewrite_to_new_ssa_names (region, n_region, ssa_name_map);
5232 allocate_ssa_names (definitions, &ssa_name_map);
5233 rewrite_to_new_ssa_names (region_copy, n_region, ssa_name_map);
5234 htab_delete (ssa_name_map);
5236 if (free_region_copy)
5237 free (region_copy);
5239 unmark_all_for_rewrite ();
5240 BITMAP_FREE (definitions);
5242 return true;
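/* Calling-convention sketch (illustrative only; the names below are
   placeholders, not taken from an actual caller): a loop-header-copying
   transformation with N blocks to duplicate could do

     basic_block *region = xmalloc (sizeof (basic_block) * n);
     ...fill REGION with the blocks, ENTRY with the preheader edge and
        EXIT with the edge that should become the new latch...
     if (tree_duplicate_sese_region (entry, exit, region, n, NULL))
       ...the copied region now guards the original loop...
     free (region);

   Passing NULL for REGION_COPY lets the function allocate and free the
   copy array itself, as handled above.  */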
5245 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h).  */
5247 void
5248 dump_function_to_file (tree fn, FILE *file, int flags)
5250 tree arg, vars, var;
5251 bool ignore_topmost_bind = false, any_var = false;
5252 basic_block bb;
5253 tree chain;
5255 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5257 arg = DECL_ARGUMENTS (fn);
5258 while (arg)
5260 print_generic_expr (file, arg, dump_flags);
5261 if (TREE_CHAIN (arg))
5262 fprintf (file, ", ");
5263 arg = TREE_CHAIN (arg);
5265 fprintf (file, ")\n");
5267 if (flags & TDF_RAW)
5269 dump_node (fn, TDF_SLIM | flags, file);
5270 return;
5273 /* When GIMPLE is lowered, the variables are no longer available in
5274 BIND_EXPRs, so display them separately. */
5275 if (cfun && cfun->unexpanded_var_list)
5277 ignore_topmost_bind = true;
5279 fprintf (file, "{\n");
5280 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5282 var = TREE_VALUE (vars);
5284 print_generic_decl (file, var, flags);
5285 fprintf (file, "\n");
5287 any_var = true;
5291 if (basic_block_info)
5293 /* Make a CFG based dump. */
5294 check_bb_profile (ENTRY_BLOCK_PTR, file);
5295 if (!ignore_topmost_bind)
5296 fprintf (file, "{\n");
5298 if (any_var && n_basic_blocks)
5299 fprintf (file, "\n");
5301 FOR_EACH_BB (bb)
5302 dump_generic_bb (file, bb, 2, flags);
5304 fprintf (file, "}\n");
5305 check_bb_profile (EXIT_BLOCK_PTR, file);
5307 else
5309 int indent;
5311 /* Make a tree based dump. */
5312 chain = DECL_SAVED_TREE (fn);
5314 if (TREE_CODE (chain) == BIND_EXPR)
5316 if (ignore_topmost_bind)
5318 chain = BIND_EXPR_BODY (chain);
5319 indent = 2;
5321 else
5322 indent = 0;
5324 else
5326 if (!ignore_topmost_bind)
5327 fprintf (file, "{\n");
5328 indent = 2;
5331 if (any_var)
5332 fprintf (file, "\n");
5334 print_generic_stmt_indented (file, chain, flags, indent);
5335 if (ignore_topmost_bind)
5336 fprintf (file, "}\n");
5339 fprintf (file, "\n\n");
5343 /* Pretty print of the loops intermediate representation. */
5344 static void print_loop (FILE *, struct loop *, int);
5345 static void print_pred_bbs (FILE *, basic_block bb);
5346 static void print_succ_bbs (FILE *, basic_block bb);
5349 /* Print on FILE the indexes of the predecessors of basic block BB. */
5351 static void
5352 print_pred_bbs (FILE *file, basic_block bb)
5354 edge e;
5355 edge_iterator ei;
5357 FOR_EACH_EDGE (e, ei, bb->preds)
5358 fprintf (file, "bb_%d", e->src->index);
5362 /* Print on FILE the indexes of the successors of basic block BB. */
5364 static void
5365 print_succ_bbs (FILE *file, basic_block bb)
5367 edge e;
5368 edge_iterator ei;
5370 FOR_EACH_EDGE (e, ei, bb->succs)
5371 fprintf (file, "bb_%d", e->dest->index);
5375 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5377 static void
5378 print_loop (FILE *file, struct loop *loop, int indent)
5380 char *s_indent;
5381 basic_block bb;
5383 if (loop == NULL)
5384 return;
5386 s_indent = (char *) alloca ((size_t) indent + 1);
5387 memset ((void *) s_indent, ' ', (size_t) indent);
5388 s_indent[indent] = '\0';
5390 /* Print the loop's header. */
5391 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5393 /* Print the loop's body. */
5394 fprintf (file, "%s{\n", s_indent);
5395 FOR_EACH_BB (bb)
5396 if (bb->loop_father == loop)
5398 /* Print the basic_block's header. */
5399 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5400 print_pred_bbs (file, bb);
5401 fprintf (file, "}, succs = {");
5402 print_succ_bbs (file, bb);
5403 fprintf (file, "})\n");
5405 /* Print the basic_block's body. */
5406 fprintf (file, "%s {\n", s_indent);
5407 tree_dump_bb (bb, file, indent + 4);
5408 fprintf (file, "%s }\n", s_indent);
5411 print_loop (file, loop->inner, indent + 2);
5412 fprintf (file, "%s}\n", s_indent);
5413 print_loop (file, loop->next, indent);
5417 /* Starting from basic block 0 (the entry of the function body),
5418 pretty print on FILE the loop structure enclosing it. */
5420 void
5421 print_loop_ir (FILE *file)
5423 basic_block bb;
5425 bb = BASIC_BLOCK (0);
5426 if (bb && bb->loop_father)
5427 print_loop (file, bb->loop_father, 0);
5431 /* Debugging loops structure at tree level. */
5433 void
5434 debug_loop_ir (void)
5436 print_loop_ir (stderr);
5440 /* Return true if BB ends with a call, possibly followed by some
5441 instructions that must stay with the call. Return false,
5442 otherwise. */
5444 static bool
5445 tree_block_ends_with_call_p (basic_block bb)
5447 block_stmt_iterator bsi = bsi_last (bb);
5448 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5452 /* Return true if BB ends with a conditional branch. Return false,
5453 otherwise. */
5455 static bool
5456 tree_block_ends_with_condjump_p (basic_block bb)
5458 tree stmt = tsi_stmt (bsi_last (bb).tsi);
5459 return (TREE_CODE (stmt) == COND_EXPR);
5463 /* Return true if we need to add fake edge to exit at statement T.
5464 Helper function for tree_flow_call_edges_add. */
5466 static bool
5467 need_fake_edge_p (tree t)
5469 tree call;
5471 /* NORETURN and LONGJMP calls already have an edge to exit.
5472 CONST and PURE calls do not need one.
5473 We don't currently check for CONST and PURE here, although
5474 it would be a good idea, because those attributes are
5475 figured out from the RTL in mark_constant_function, and
5476 the counter incrementation code from -fprofile-arcs
5477 leads to different results from -fbranch-probabilities. */
5478 call = get_call_expr_in (t);
5479 if (call
5480 && !(call_expr_flags (call) & ECF_NORETURN))
5481 return true;
5483 if (TREE_CODE (t) == ASM_EXPR
5484 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5485 return true;
5487 return false;
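/* Illustrative examples (not taken from the sources): in a block ending with

     foo ();                       ordinary call, might not return
     __asm__ __volatile__ ("");    volatile asm

   need_fake_edge_p returns true for both statements, so the block gets
   an EDGE_FAKE successor to EXIT_BLOCK_PTR in tree_flow_call_edges_add
   below; a call known to be ECF_NORETURN already has its edge to exit
   and is skipped.  */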
5491 /* Add fake edges to the function exit for any non-constant and
5492 non-noreturn calls, and for volatile inline assembly, in the bitmap of
5493 blocks specified by BLOCKS, or to the whole CFG if BLOCKS is zero.
5494 Return the number of blocks that were split.
5496 The goal is to expose cases in which entering a basic block does
5497 not imply that all subsequent instructions must be executed. */
5499 static int
5500 tree_flow_call_edges_add (sbitmap blocks)
5502 int i;
5503 int blocks_split = 0;
5504 int last_bb = last_basic_block;
5505 bool check_last_block = false;
5507 if (n_basic_blocks == 0)
5508 return 0;
5510 if (! blocks)
5511 check_last_block = true;
5512 else
5513 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5515 /* In the last basic block, before epilogue generation, there will be
5516 a fallthru edge to EXIT. Special care is required if the last insn
5517 of the last basic block is a call because make_edge folds duplicate
5518 edges, which would result in the fallthru edge also being marked
5519 fake, which would result in the fallthru edge being removed by
5520 remove_fake_edges, which would result in an invalid CFG.
5522 Moreover, we can't elide the outgoing fake edge, since the block
5523 profiler needs to take this into account in order to solve the minimal
5524 spanning tree in the case that the call doesn't return.
5526 Handle this by adding a dummy instruction in a new last basic block. */
5527 if (check_last_block)
5529 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5530 block_stmt_iterator bsi = bsi_last (bb);
5531 tree t = NULL_TREE;
5532 if (!bsi_end_p (bsi))
5533 t = bsi_stmt (bsi);
5535 if (need_fake_edge_p (t))
5537 edge e;
5539 e = find_edge (bb, EXIT_BLOCK_PTR);
5540 if (e)
5542 bsi_insert_on_edge (e, build_empty_stmt ());
5543 bsi_commit_edge_inserts ();
5548 /* Now add fake edges to the function exit for any non-constant
5549 calls since there is no way that we can determine if they will
5550 return or not... */
5551 for (i = 0; i < last_bb; i++)
5553 basic_block bb = BASIC_BLOCK (i);
5554 block_stmt_iterator bsi;
5555 tree stmt, last_stmt;
5557 if (!bb)
5558 continue;
5560 if (blocks && !TEST_BIT (blocks, i))
5561 continue;
5563 bsi = bsi_last (bb);
5564 if (!bsi_end_p (bsi))
5566 last_stmt = bsi_stmt (bsi);
5569 stmt = bsi_stmt (bsi);
5570 if (need_fake_edge_p (stmt))
5572 edge e;
5573 /* The handling above of the final block before the
5574 epilogue should be enough to verify that there is
5575 no edge to the exit block in the CFG already.
5576 Calling make_edge in such a case would cause us to
5577 mark that edge as fake and remove it later. */
5578 #ifdef ENABLE_CHECKING
5579 if (stmt == last_stmt)
5581 e = find_edge (bb, EXIT_BLOCK_PTR);
5582 gcc_assert (e == NULL);
5584 #endif
5586 /* Note that the following may create a new basic block
5587 and renumber the existing basic blocks. */
5588 if (stmt != last_stmt)
5590 e = split_block (bb, stmt);
5591 if (e)
5592 blocks_split++;
5594 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5596 bsi_prev (&bsi);
5598 while (!bsi_end_p (bsi));
5602 if (blocks_split)
5603 verify_flow_info ();
5605 return blocks_split;
5608 bool
5609 tree_purge_dead_eh_edges (basic_block bb)
5611 bool changed = false;
5612 edge e;
5613 edge_iterator ei;
5614 tree stmt = last_stmt (bb);
5616 if (stmt && tree_can_throw_internal (stmt))
5617 return false;
5619 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5621 if (e->flags & EDGE_EH)
5623 remove_edge (e);
5624 changed = true;
5626 else
5627 ei_next (&ei);
5630 /* Removal of dead EH edges might change dominators of not
5631 just immediate successors. E.g. when bb1 is changed so that
5632 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5633 eh edges purged by this function in:
5637   1-->2
5638  / \  |
5639 v   v |
5640 3-->4 |
5641  \    v
5642   --->5
5645 idom(bb5) must be recomputed. For now just free the dominance
5646 info. */
5647 if (changed)
5648 free_dominance_info (CDI_DOMINATORS);
5650 return changed;
5653 bool
5654 tree_purge_all_dead_eh_edges (bitmap blocks)
5656 bool changed = false;
5657 unsigned i;
5658 bitmap_iterator bi;
5660 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5662 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5665 return changed;
5668 /* This function is called whenever a new edge is created or
5669 redirected. */
5671 static void
5672 tree_execute_on_growing_pred (edge e)
5674 basic_block bb = e->dest;
5676 if (phi_nodes (bb))
5677 reserve_phi_args_for_new_edge (bb);
5680 /* This function is called immediately before edge E is removed from
5681 the edge vector E->dest->preds. */
5683 static void
5684 tree_execute_on_shrinking_pred (edge e)
5686 if (phi_nodes (e->dest))
5687 remove_phi_args (e);
5690 struct cfg_hooks tree_cfg_hooks = {
5691 "tree",
5692 tree_verify_flow_info,
5693 tree_dump_bb, /* dump_bb */
5694 create_bb, /* create_basic_block */
5695 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5696 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5697 remove_bb, /* delete_basic_block */
5698 tree_split_block, /* split_block */
5699 tree_move_block_after, /* move_block_after */
5700 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5701 tree_merge_blocks, /* merge_blocks */
5702 tree_predict_edge, /* predict_edge */
5703 tree_predicted_by_p, /* predicted_by_p */
5704 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5705 tree_duplicate_bb, /* duplicate_block */
5706 tree_split_edge, /* split_edge */
5707 tree_make_forwarder_block, /* make_forwarder_block */
5708 NULL, /* tidy_fallthru_edge */
5709 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5710 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5711 tree_flow_call_edges_add, /* flow_call_edges_add */
5712 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5713 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5717 /* Split all critical edges. */
5719 static void
5720 split_critical_edges (void)
5722 basic_block bb;
5723 edge e;
5724 edge_iterator ei;
5726 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5727 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5728 mappings around the calls to split_edge. */
5729 start_recording_case_labels ();
5730 FOR_ALL_BB (bb)
5732 FOR_EACH_EDGE (e, ei, bb->succs)
5733 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5735 split_edge (e);
5738 end_recording_case_labels ();
5741 struct tree_opt_pass pass_split_crit_edges =
5743 "crited", /* name */
5744 NULL, /* gate */
5745 split_critical_edges, /* execute */
5746 NULL, /* sub */
5747 NULL, /* next */
5748 0, /* static_pass_number */
5749 TV_TREE_SPLIT_EDGES, /* tv_id */
5750 PROP_cfg, /* properties_required */
5751 PROP_no_crit_edges, /* properties_provided */
5752 0, /* properties_destroyed */
5753 0, /* todo_flags_start */
5754 TODO_dump_func, /* todo_flags_finish */
5755 0 /* letter */
5759 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5760 a temporary, register the temporary to be renamed if necessary,
5761 and finally return the temporary. Put the statements to compute
5762 EXP before the current statement in BSI. */
5764 tree
5765 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5767 tree t, new_stmt, orig_stmt;
5769 if (is_gimple_val (exp))
5770 return exp;
5772 t = make_rename_temp (type, NULL);
5773 new_stmt = build (MODIFY_EXPR, type, t, exp);
5775 orig_stmt = bsi_stmt (*bsi);
5776 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5777 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5779 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5781 return t;
5784 /* Build a ternary operation and gimplify it. Emit code before BSI.
5785 Return the gimple_val holding the result. */
5787 tree
5788 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5789 tree type, tree a, tree b, tree c)
5791 tree ret;
5793 ret = fold (build3 (code, type, a, b, c));
5794 STRIP_NOPS (ret);
5796 return gimplify_val (bsi, type, ret);
5799 /* Build a binary operation and gimplify it. Emit code before BSI.
5800 Return the gimple_val holding the result. */
5802 tree
5803 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5804 tree type, tree a, tree b)
5806 tree ret;
5808 ret = fold (build2 (code, type, a, b));
5809 STRIP_NOPS (ret);
5811 return gimplify_val (bsi, type, ret);
5814 /* Build a unary operation and gimplify it. Emit code before BSI.
5815 Return the gimple_val holding the result. */
5817 tree
5818 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5819 tree a)
5821 tree ret;
5823 ret = fold (build1 (code, type, a));
5824 STRIP_NOPS (ret);
5826 return gimplify_val (bsi, type, ret);
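/* Usage sketch (illustrative; A, B, C and TYPE are placeholders): a
   pass that wants to compute a * b + c before the statement at *BSI
   could write

     tree t1 = gimplify_build2 (bsi, MULT_EXPR, type, a, b);
     tree t2 = gimplify_build2 (bsi, PLUS_EXPR, type, t1, c);

   T2 is then a GIMPLE value; the statements computing it have been
   inserted before *BSI by gimplify_val.  */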
5831 /* Emit return warnings. */
5833 static void
5834 execute_warn_function_return (void)
5836 #ifdef USE_MAPPED_LOCATION
5837 source_location location;
5838 #else
5839 location_t *locus;
5840 #endif
5841 tree last;
5842 edge e;
5843 edge_iterator ei;
5845 if (warn_missing_noreturn
5846 && !TREE_THIS_VOLATILE (cfun->decl)
5847 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5848 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5849 warning ("%Jfunction might be possible candidate for "
5850 "attribute %<noreturn%>",
5851 cfun->decl);
5853 /* If we have a path to EXIT, then we do return. */
5854 if (TREE_THIS_VOLATILE (cfun->decl)
5855 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5857 #ifdef USE_MAPPED_LOCATION
5858 location = UNKNOWN_LOCATION;
5859 #else
5860 locus = NULL;
5861 #endif
5862 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5864 last = last_stmt (e->src);
5865 if (TREE_CODE (last) == RETURN_EXPR
5866 #ifdef USE_MAPPED_LOCATION
5867 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5868 #else
5869 && (locus = EXPR_LOCUS (last)) != NULL)
5870 #endif
5871 break;
5873 #ifdef USE_MAPPED_LOCATION
5874 if (location == UNKNOWN_LOCATION)
5875 location = cfun->function_end_locus;
5876 warning ("%H%<noreturn%> function does return", &location);
5877 #else
5878 if (!locus)
5879 locus = &cfun->function_end_locus;
5880 warning ("%H%<noreturn%> function does return", locus);
5881 #endif
5884 /* If we see "return;" in some basic block, then we do reach the end
5885 without returning a value. */
5886 else if (warn_return_type
5887 && !TREE_NO_WARNING (cfun->decl)
5888 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5889 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5891 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5893 tree last = last_stmt (e->src);
5894 if (TREE_CODE (last) == RETURN_EXPR
5895 && TREE_OPERAND (last, 0) == NULL)
5897 #ifdef USE_MAPPED_LOCATION
5898 location = EXPR_LOCATION (last);
5899 if (location == UNKNOWN_LOCATION)
5900 location = cfun->function_end_locus;
5901 warning ("%Hcontrol reaches end of non-void function", &location);
5902 #else
5903 locus = EXPR_LOCUS (last);
5904 if (!locus)
5905 locus = &cfun->function_end_locus;
5906 warning ("%Hcontrol reaches end of non-void function", locus);
5907 #endif
5908 TREE_NO_WARNING (cfun->decl) = 1;
5909 break;
5916 /* Given a basic block B which ends with a conditional and has
5917 precisely two successors, determine which of the edges is taken if
5918 the conditional is true and which is taken if the conditional is
5919 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5921 void
5922 extract_true_false_edges_from_block (basic_block b,
5923 edge *true_edge,
5924 edge *false_edge)
5926 edge e = EDGE_SUCC (b, 0);
5928 if (e->flags & EDGE_TRUE_VALUE)
5930 *true_edge = e;
5931 *false_edge = EDGE_SUCC (b, 1);
5933 else
5935 *false_edge = e;
5936 *true_edge = EDGE_SUCC (b, 1);
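/* Usage sketch (illustrative only): for a block B ending in a COND_EXPR,

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (b, &true_edge, &false_edge);

   after which true_edge->dest and false_edge->dest are the blocks
   reached when the condition is true or false, respectively.  B is
   assumed to have exactly two successors, as described above.  */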
5940 struct tree_opt_pass pass_warn_function_return =
5942 NULL, /* name */
5943 NULL, /* gate */
5944 execute_warn_function_return, /* execute */
5945 NULL, /* sub */
5946 NULL, /* next */
5947 0, /* static_pass_number */
5948 0, /* tv_id */
5949 PROP_cfg, /* properties_required */
5950 0, /* properties_provided */
5951 0, /* properties_destroyed */
5952 0, /* todo_flags_start */
5953 0, /* todo_flags_finish */
5954 0 /* letter */
5957 #include "gt-tree-cfg.h"