* tree-cfg.c (make_goto_expr_edges): Don't use error_mark_node.
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "errors.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
52 /* Local declarations. */
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
57 /* Mapping of labels to their associated blocks. This can greatly speed up
58 building of the CFG in code with lots of gotos. */
59 static GTY(()) varray_type label_to_block_map;
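/* Illustrative sketch (standalone C, not GCC code): the map above behaves
   like a growable array indexed by a label's UID.  The real map is a
   garbage-collected varray of basic blocks; the plain realloc'd array and
   the `struct block' stand-in below are simplifying assumptions.  */
#include <stdlib.h>

struct block;                        /* stand-in for basic_block */

static struct block **label_map;     /* indexed by label UID */
static size_t label_map_size;

/* Record that the label with UID uid starts block BB, growing the map on
   demand (error handling omitted).  */
static void
set_block_for_label (int uid, struct block *bb)
{
  if ((size_t) uid >= label_map_size)
    {
      size_t new_size = (size_t) uid + 1;
      label_map = realloc (label_map, new_size * sizeof *label_map);
      for (size_t i = label_map_size; i < new_size; i++)
        label_map[i] = NULL;
      label_map_size = new_size;
    }
  label_map[uid] = bb;
}

/* Constant-time lookup used when goto edges are created.  */
static struct block *
block_for_label (int uid)
{
  return (uid >= 0 && (size_t) uid < label_map_size) ? label_map[uid] : NULL;
}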
61 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
62 which use a particular edge. The CASE_LABEL_EXPRs are chained together
63 via their TREE_CHAIN field, which we clear after we're done with the
64 hash table to prevent problems with duplication of SWITCH_EXPRs.
66 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
67 update the case vector in response to edge redirections.
69 Right now this table is set up and torn down at key points in the
70 compilation process. It would be nice if we could make the table
71 more persistent. The key is getting notification of changes to
72 the CFG (particularly edge removal, creation and redirection). */
74 struct edge_to_cases_elt
76 /* The edge itself. Necessary for hashing and equality tests. */
77 edge e;
79 /* The case labels associated with this edge. We link these up via
80 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
81 when we destroy the hash table. This prevents problems when copying
82 SWITCH_EXPRs. */
83 tree case_labels;
86 static htab_t edge_to_cases;
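/* Illustrative sketch (standalone C, not GCC code): the table above is in
   essence a map from an edge pointer to a chain of case labels, with the
   chain links stored in the cases themselves.  `struct case_node' and its
   `next' field are stand-ins for CASE_LABEL_EXPR and TREE_CHAIN.  */
#include <stddef.h>

struct case_node                /* stand-in for a CASE_LABEL_EXPR */
{
  int low, high;                /* case value range */
  struct case_node *next;       /* plays the role of TREE_CHAIN */
};

struct edge_cases_entry         /* stand-in for edge_to_cases_elt */
{
  void *edge;                   /* hashed by pointer identity */
  struct case_node *cases;      /* head of the chain for this edge */
};

/* Prepend CASE_N to the chain recorded for ENTRY; compare
   record_switch_edge.  */
static void
add_case_for_edge (struct edge_cases_entry *entry, struct case_node *case_n)
{
  case_n->next = entry->cases;
  entry->cases = case_n;
}

/* Tearing the table down clears the links again (compare
   edge_to_cases_cleanup); otherwise later copies of the SWITCH_EXPR would
   share the chains.  */
static void
clear_cases_for_edge (struct edge_cases_entry *entry)
{
  for (struct case_node *c = entry->cases, *next; c; c = next)
    {
      next = c->next;
      c->next = NULL;
    }
  entry->cases = NULL;
}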
88 /* CFG statistics. */
89 struct cfg_stats_d
91 long num_merged_labels;
94 static struct cfg_stats_d cfg_stats;
96 /* Nonzero if we found a computed goto while building basic blocks. */
97 static bool found_computed_goto;
99 /* Basic blocks and flowgraphs. */
100 static basic_block create_bb (void *, void *, basic_block);
101 static void create_block_annotation (basic_block);
102 static void free_blocks_annotations (void);
103 static void clear_blocks_annotations (void);
104 static void make_blocks (tree);
105 static void factor_computed_gotos (void);
107 /* Edges. */
108 static void make_edges (void);
109 static void make_ctrl_stmt_edges (basic_block);
110 static void make_exit_edges (basic_block);
111 static void make_cond_expr_edges (basic_block);
112 static void make_switch_expr_edges (basic_block);
113 static void make_goto_expr_edges (basic_block);
114 static edge tree_redirect_edge_and_branch (edge, basic_block);
115 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
116 static void split_critical_edges (void);
117 static bool remove_fallthru_edge (VEC(edge) *);
119 /* Various helpers. */
120 static inline bool stmt_starts_bb_p (tree, tree);
121 static int tree_verify_flow_info (void);
122 static void tree_make_forwarder_block (edge);
123 static bool tree_forwarder_block_p (basic_block, bool);
124 static void tree_cfg2vcg (FILE *);
126 /* Flowgraph optimization and cleanup. */
127 static void tree_merge_blocks (basic_block, basic_block);
128 static bool tree_can_merge_blocks_p (basic_block, basic_block);
129 static void remove_bb (basic_block);
130 static bool cleanup_control_flow (void);
131 static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
132 static edge find_taken_edge_computed_goto (basic_block, tree);
133 static edge find_taken_edge_cond_expr (basic_block, tree);
134 static edge find_taken_edge_switch_expr (basic_block, tree);
135 static tree find_case_label_for_value (tree, tree);
136 static bool phi_alternatives_equal (basic_block, edge, edge);
137 static bool cleanup_forwarder_blocks (void);
140 /*---------------------------------------------------------------------------
141 Create basic blocks
142 ---------------------------------------------------------------------------*/
144 /* Entry point to the CFG builder for trees. TP points to the list of
145 statements to be added to the flowgraph. */
147 static void
148 build_tree_cfg (tree *tp)
150 /* Register specific tree functions. */
151 tree_register_cfg_hooks ();
153 /* Initialize rbi_pool. */
154 alloc_rbi_pool ();
156 /* Initialize the basic block array. */
157 init_flow ();
158 profile_status = PROFILE_ABSENT;
159 n_basic_blocks = 0;
160 last_basic_block = 0;
161 VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
162 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
164 /* Build a mapping of labels to their associated blocks. */
165 VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
166 "label to block map");
168 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
169 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
171 found_computed_goto = 0;
172 make_blocks (*tp);
174 /* Computed gotos are hell to deal with, especially if there are
175 lots of them with a large number of destinations. So we factor
176 them to a common computed goto location before we build the
177 edge list. After we convert back to normal form, we will un-factor
178 the computed gotos since factoring introduces an unwanted jump. */
179 if (found_computed_goto)
180 factor_computed_gotos ();
182 /* Make sure there is always at least one block, even if it's empty. */
183 if (n_basic_blocks == 0)
184 create_empty_bb (ENTRY_BLOCK_PTR);
186 create_block_annotation (ENTRY_BLOCK_PTR);
187 create_block_annotation (EXIT_BLOCK_PTR);
189 /* Adjust the size of the array. */
190 VARRAY_GROW (basic_block_info, n_basic_blocks);
192 /* To speed up statement iterator walks, we first purge dead labels. */
193 cleanup_dead_labels ();
195 /* Group case nodes to reduce the number of edges.
196 We do this after cleaning up dead labels because otherwise we miss
197 a lot of obvious case merging opportunities. */
198 group_case_labels ();
200 /* Create the edges of the flowgraph. */
201 make_edges ();
203 /* Debugging dumps. */
205 /* Write the flowgraph to a VCG file. */
207 int local_dump_flags;
208 FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
209 if (dump_file)
211 tree_cfg2vcg (dump_file);
212 dump_end (TDI_vcg, dump_file);
216 /* Dump a textual representation of the flowgraph. */
217 if (dump_file)
218 dump_tree_cfg (dump_file, dump_flags);
221 static void
222 execute_build_cfg (void)
224 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
227 struct tree_opt_pass pass_build_cfg =
229 "cfg", /* name */
230 NULL, /* gate */
231 execute_build_cfg, /* execute */
232 NULL, /* sub */
233 NULL, /* next */
234 0, /* static_pass_number */
235 TV_TREE_CFG, /* tv_id */
236 PROP_gimple_leh, /* properties_required */
237 PROP_cfg, /* properties_provided */
238 0, /* properties_destroyed */
239 0, /* todo_flags_start */
240 TODO_verify_stmts, /* todo_flags_finish */
241 0 /* letter */
244 /* Search the CFG for any computed gotos. If found, factor them to a
245 common computed goto site. Also record the location of that site so
246 that we can un-factor the gotos after we have converted back to
247 normal form. */
249 static void
250 factor_computed_gotos (void)
252 basic_block bb;
253 tree factored_label_decl = NULL;
254 tree var = NULL;
255 tree factored_computed_goto_label = NULL;
256 tree factored_computed_goto = NULL;
258 /* We know there are one or more computed gotos in this function.
259 Examine the last statement in each basic block to see if the block
260 ends with a computed goto. */
262 FOR_EACH_BB (bb)
264 block_stmt_iterator bsi = bsi_last (bb);
265 tree last;
267 if (bsi_end_p (bsi))
268 continue;
269 last = bsi_stmt (bsi);
271 /* Ignore the computed goto we create when we factor the original
272 computed gotos. */
273 if (last == factored_computed_goto)
274 continue;
276 /* If the last statement is a computed goto, factor it. */
277 if (computed_goto_p (last))
279 tree assignment;
281 /* The first time we find a computed goto we need to create
282 the factored goto block and the variable each original
283 computed goto will use for its goto destination. */
284 if (! factored_computed_goto)
286 basic_block new_bb = create_empty_bb (bb);
287 block_stmt_iterator new_bsi = bsi_start (new_bb);
289 /* Create the destination of the factored goto. Each original
290 computed goto will put its desired destination into this
291 variable and jump to the label we create immediately
292 below. */
293 var = create_tmp_var (ptr_type_node, "gotovar");
295 /* Build a label for the new block which will contain the
296 factored computed goto. */
297 factored_label_decl = create_artificial_label ();
298 factored_computed_goto_label
299 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
300 bsi_insert_after (&new_bsi, factored_computed_goto_label,
301 BSI_NEW_STMT);
303 /* Build our new computed goto. */
304 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
305 bsi_insert_after (&new_bsi, factored_computed_goto,
306 BSI_NEW_STMT);
309 /* Copy the original computed goto's destination into VAR. */
310 assignment = build (MODIFY_EXPR, ptr_type_node,
311 var, GOTO_DESTINATION (last));
312 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
314 /* And re-vector the computed goto to the new destination. */
315 GOTO_DESTINATION (last) = factored_label_decl;
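/* Sketch of the factoring at the source level, written as standalone GNU C
   using the labels-as-values extension.  The pass itself rewrites GIMPLE;
   the function names and the dispatch table below are purely illustrative.  */

/* Before factoring: every computed goto is a separate indirect jump.  */
int
dispatch_unfactored (int i)
{
  void *tab[] = { &&a, &&b };

  goto *tab[i & 1];             /* first computed goto */
 a:
  i++;
  goto *tab[1];                 /* second computed goto */
 b:
  return i;
}

/* After factoring: each original goto stores its destination in a shared
   variable and jumps to a single factored dispatch point, so only one
   indirect jump remains.  */
int
dispatch_factored (int i)
{
  void *tab[] = { &&a, &&b };
  void *gotovar;

  gotovar = tab[i & 1];
  goto factored;
 a:
  i++;
  gotovar = tab[1];
  goto factored;
 b:
  return i;

 factored:
  goto *gotovar;
}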
321 /* Create annotations for a single basic block. */
323 static void
324 create_block_annotation (basic_block bb)
326 /* Verify that the tree_annotations field is clear. */
327 gcc_assert (!bb->tree_annotations);
328 bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
332 /* Free the annotations for all the basic blocks. */
334 static void free_blocks_annotations (void)
336 clear_blocks_annotations ();
340 /* Clear the annotations for all the basic blocks. */
342 static void
343 clear_blocks_annotations (void)
345 basic_block bb;
347 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
348 bb->tree_annotations = NULL;
352 /* Build a flowgraph for the statement_list STMT_LIST. */
354 static void
355 make_blocks (tree stmt_list)
357 tree_stmt_iterator i = tsi_start (stmt_list);
358 tree stmt = NULL;
359 bool start_new_block = true;
360 bool first_stmt_of_list = true;
361 basic_block bb = ENTRY_BLOCK_PTR;
363 while (!tsi_end_p (i))
365 tree prev_stmt;
367 prev_stmt = stmt;
368 stmt = tsi_stmt (i);
370 /* If the statement starts a new basic block or if we have determined
371 in a previous pass that we need to create a new block for STMT, do
372 so now. */
373 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
375 if (!first_stmt_of_list)
376 stmt_list = tsi_split_statement_list_before (&i);
377 bb = create_basic_block (stmt_list, NULL, bb);
378 start_new_block = false;
381 /* Now add STMT to BB and create the subgraphs for special statement
382 codes. */
383 set_bb_for_stmt (stmt, bb);
385 if (computed_goto_p (stmt))
386 found_computed_goto = true;
388 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
389 next iteration. */
390 if (stmt_ends_bb_p (stmt))
391 start_new_block = true;
393 tsi_next (&i);
394 first_stmt_of_list = false;
399 /* Create and return a new empty basic block after bb AFTER. */
401 static basic_block
402 create_bb (void *h, void *e, basic_block after)
404 basic_block bb;
406 gcc_assert (!e);
408 /* Create and initialize a new basic block. Since alloc_block uses
409 ggc_alloc_cleared to allocate a basic block, we do not have to
410 clear the newly allocated basic block here. */
411 bb = alloc_block ();
413 bb->index = last_basic_block;
414 bb->flags = BB_NEW;
415 bb->stmt_list = h ? h : alloc_stmt_list ();
417 /* Add the new block to the linked list of blocks. */
418 link_block (bb, after);
420 /* Grow the basic block array if needed. */
421 if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
423 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
424 VARRAY_GROW (basic_block_info, new_size);
427 /* Add the newly created block to the array. */
428 BASIC_BLOCK (last_basic_block) = bb;
430 create_block_annotation (bb);
432 n_basic_blocks++;
433 last_basic_block++;
435 initialize_bb_rbi (bb);
436 return bb;
440 /*---------------------------------------------------------------------------
441 Edge creation
442 ---------------------------------------------------------------------------*/
444 /* Fold COND_EXPR_COND of each COND_EXPR. */
446 static void
447 fold_cond_expr_cond (void)
449 basic_block bb;
451 FOR_EACH_BB (bb)
453 tree stmt = last_stmt (bb);
455 if (stmt
456 && TREE_CODE (stmt) == COND_EXPR)
458 tree cond = fold (COND_EXPR_COND (stmt));
459 if (integer_zerop (cond))
460 COND_EXPR_COND (stmt) = integer_zero_node;
461 else if (integer_onep (cond))
462 COND_EXPR_COND (stmt) = integer_one_node;
467 /* Join all the blocks in the flowgraph. */
469 static void
470 make_edges (void)
472 basic_block bb;
474 /* Create an edge from entry to the first block with executable
475 statements in it. */
476 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
478 /* Traverse the basic block array placing edges. */
479 FOR_EACH_BB (bb)
481 tree first = first_stmt (bb);
482 tree last = last_stmt (bb);
484 if (first)
486 /* Edges for statements that always alter flow control. */
487 if (is_ctrl_stmt (last))
488 make_ctrl_stmt_edges (bb);
490 /* Edges for statements that sometimes alter flow control. */
491 if (is_ctrl_altering_stmt (last))
492 make_exit_edges (bb);
495 /* Finally, if no edges were created above, this is a regular
496 basic block that only needs a fallthru edge. */
497 if (EDGE_COUNT (bb->succs) == 0)
498 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
501 /* We do not care about fake edges, so remove any that the CFG
502 builder inserted for completeness. */
503 remove_fake_exit_edges ();
505 /* Fold COND_EXPR_COND of each COND_EXPR. */
506 fold_cond_expr_cond ();
508 /* Clean up the graph and warn for unreachable code. */
509 cleanup_tree_cfg ();
513 /* Create edges for control statement at basic block BB. */
515 static void
516 make_ctrl_stmt_edges (basic_block bb)
518 tree last = last_stmt (bb);
520 gcc_assert (last);
521 switch (TREE_CODE (last))
523 case GOTO_EXPR:
524 make_goto_expr_edges (bb);
525 break;
527 case RETURN_EXPR:
528 make_edge (bb, EXIT_BLOCK_PTR, 0);
529 break;
531 case COND_EXPR:
532 make_cond_expr_edges (bb);
533 break;
535 case SWITCH_EXPR:
536 make_switch_expr_edges (bb);
537 break;
539 case RESX_EXPR:
540 make_eh_edges (last);
541 /* Yet another NORETURN hack. */
542 if (EDGE_COUNT (bb->succs) == 0)
543 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
544 break;
546 default:
547 gcc_unreachable ();
552 /* Create exit edges for statements in block BB that alter the flow of
553 control. Statements that alter the control flow are 'goto', 'return'
554 and calls to non-returning functions. */
556 static void
557 make_exit_edges (basic_block bb)
559 tree last = last_stmt (bb), op;
561 gcc_assert (last);
562 switch (TREE_CODE (last))
564 case CALL_EXPR:
565 /* If this function receives a nonlocal goto, then we need to
566 make edges from this call site to all the nonlocal goto
567 handlers. */
568 if (TREE_SIDE_EFFECTS (last)
569 && current_function_has_nonlocal_label)
570 make_goto_expr_edges (bb);
572 /* If this statement has reachable exception handlers, then
573 create abnormal edges to them. */
574 make_eh_edges (last);
576 /* Some calls are known not to return. For such calls we create
577 a fake edge.
579 We really need to revamp how we build edges so that it's not
580 such a bloody pain to avoid creating edges for this case since
581 all we do is remove these edges when we're done building the
582 CFG. */
583 if (call_expr_flags (last) & ECF_NORETURN)
585 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
586 return;
589 /* Don't forget the fall-thru edge. */
590 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
591 break;
593 case MODIFY_EXPR:
594 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
595 may have an abnormal edge. Search the RHS for this case and
596 create any required edges. */
597 op = get_call_expr_in (last);
598 if (op && TREE_SIDE_EFFECTS (op)
599 && current_function_has_nonlocal_label)
600 make_goto_expr_edges (bb);
602 make_eh_edges (last);
603 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
604 break;
606 default:
607 gcc_unreachable ();
612 /* Create the edges for a COND_EXPR starting at block BB.
613 At this point, both clauses must contain only simple gotos. */
615 static void
616 make_cond_expr_edges (basic_block bb)
618 tree entry = last_stmt (bb);
619 basic_block then_bb, else_bb;
620 tree then_label, else_label;
622 gcc_assert (entry);
623 gcc_assert (TREE_CODE (entry) == COND_EXPR);
625 /* Entry basic blocks for each component. */
626 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
627 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
628 then_bb = label_to_block (then_label);
629 else_bb = label_to_block (else_label);
631 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
632 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
635 /* Hashing routine for EDGE_TO_CASES. */
637 static hashval_t
638 edge_to_cases_hash (const void *p)
640 edge e = ((struct edge_to_cases_elt *)p)->e;
642 /* Hash on the edge itself (which is a pointer). */
643 return htab_hash_pointer (e);
646 /* Equality routine for EDGE_TO_CASES, edges are unique, so testing
647 for equality is just a pointer comparison. */
649 static int
650 edge_to_cases_eq (const void *p1, const void *p2)
652 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
653 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
655 return e1 == e2;
658 /* Called for each element in the hash table (P) as we delete the
659 edge to cases hash table.
661 Clear all the TREE_CHAINs to prevent problems with copying of
662 SWITCH_EXPRs and structure sharing rules, then free the hash table
663 element. */
665 static void
666 edge_to_cases_cleanup (void *p)
668 struct edge_to_cases_elt *elt = p;
669 tree t, next;
671 for (t = elt->case_labels; t; t = next)
673 next = TREE_CHAIN (t);
674 TREE_CHAIN (t) = NULL;
676 free (p);
679 /* Start recording information mapping edges to case labels. */
681 static void
682 start_recording_case_labels (void)
684 gcc_assert (edge_to_cases == NULL);
686 edge_to_cases = htab_create (37,
687 edge_to_cases_hash,
688 edge_to_cases_eq,
689 edge_to_cases_cleanup);
692 /* Return nonzero if we are recording information for case labels. */
694 static bool
695 recording_case_labels_p (void)
697 return (edge_to_cases != NULL);
700 /* Stop recording information mapping edges to case labels and
701 remove any information we have recorded. */
702 static void
703 end_recording_case_labels (void)
705 htab_delete (edge_to_cases);
706 edge_to_cases = NULL;
709 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
711 static void
712 record_switch_edge (edge e, tree case_label)
714 struct edge_to_cases_elt *elt;
715 void **slot;
717 /* Build a hash table element so we can see if E is already
718 in the table. */
719 elt = xmalloc (sizeof (struct edge_to_cases_elt));
720 elt->e = e;
721 elt->case_labels = case_label;
723 slot = htab_find_slot (edge_to_cases, elt, INSERT);
725 if (*slot == NULL)
727 /* E was not in the hash table. Install E into the hash table. */
728 *slot = (void *)elt;
730 else
732 /* E was already in the hash table. Free ELT as we do not need it
733 anymore. */
734 free (elt);
736 /* Get the entry stored in the hash table. */
737 elt = (struct edge_to_cases_elt *) *slot;
739 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
740 TREE_CHAIN (case_label) = elt->case_labels;
741 elt->case_labels = case_label;
745 /* If we are inside a {start,end}_recording_cases block, then return
746 a chain of CASE_LABEL_EXPRs from T which reference E.
748 Otherwise return NULL. */
750 static tree
751 get_cases_for_edge (edge e, tree t)
753 struct edge_to_cases_elt elt, *elt_p;
754 void **slot;
755 size_t i, n;
756 tree vec;
758 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
759 chains available. Return NULL so the caller can detect this case. */
760 if (!recording_case_labels_p ())
761 return NULL;
763 restart:
764 elt.e = e;
765 elt.case_labels = NULL;
766 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
768 if (slot)
770 elt_p = (struct edge_to_cases_elt *)*slot;
771 return elt_p->case_labels;
774 /* If we did not find E in the hash table, then this must be the first
775 time we have been queried for information about E & T. Add all the
776 elements from T to the hash table then perform the query again. */
778 vec = SWITCH_LABELS (t);
779 n = TREE_VEC_LENGTH (vec);
780 for (i = 0; i < n; i++)
782 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
783 basic_block label_bb = label_to_block (lab);
784 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
786 goto restart;
789 /* Create the edges for a SWITCH_EXPR starting at block BB.
790 At this point, the switch body has been lowered and the
791 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
793 static void
794 make_switch_expr_edges (basic_block bb)
796 tree entry = last_stmt (bb);
797 size_t i, n;
798 tree vec;
800 vec = SWITCH_LABELS (entry);
801 n = TREE_VEC_LENGTH (vec);
803 for (i = 0; i < n; ++i)
805 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
806 basic_block label_bb = label_to_block (lab);
807 make_edge (bb, label_bb, 0);
812 /* Return the basic block holding label DEST. */
814 basic_block
815 label_to_block (tree dest)
817 int uid = LABEL_DECL_UID (dest);
819 /* We would die hard when faced by an undefined label. Emit a label to
820 the very first basic block. This will hopefully make even the dataflow
821 and undefined variable warnings quite right. */
822 if ((errorcount || sorrycount) && uid < 0)
824 block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
825 tree stmt;
827 stmt = build1 (LABEL_EXPR, void_type_node, dest);
828 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
829 uid = LABEL_DECL_UID (dest);
831 return VARRAY_BB (label_to_block_map, uid);
835 /* Create edges for a goto statement at block BB. */
837 static void
838 make_goto_expr_edges (basic_block bb)
840 tree goto_t;
841 basic_block target_bb;
842 int for_call;
843 block_stmt_iterator last = bsi_last (bb);
845 goto_t = bsi_stmt (last);
847 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
848 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
849 from a nonlocal goto. */
850 if (TREE_CODE (goto_t) != GOTO_EXPR)
851 for_call = 1;
852 else
854 tree dest = GOTO_DESTINATION (goto_t);
855 for_call = 0;
857 /* A GOTO to a local label creates normal edges. */
858 if (simple_goto_p (goto_t))
860 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
861 #ifdef USE_MAPPED_LOCATION
862 e->goto_locus = EXPR_LOCATION (goto_t);
863 #else
864 e->goto_locus = EXPR_LOCUS (goto_t);
865 #endif
866 bsi_remove (&last);
867 return;
870 /* Nothing more to do for nonlocal gotos. */
871 if (TREE_CODE (dest) == LABEL_DECL)
872 return;
874 /* Computed gotos remain. */
877 /* Look for the block starting with the destination label. In the
878 case of a computed goto, make an edge to any label block we find
879 in the CFG. */
880 FOR_EACH_BB (target_bb)
882 block_stmt_iterator bsi;
884 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
886 tree target = bsi_stmt (bsi);
888 if (TREE_CODE (target) != LABEL_EXPR)
889 break;
891 if (
892 /* Computed GOTOs. Make an edge to every label block that has
893 been marked as a potential target for a computed goto. */
894 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
895 /* Nonlocal GOTO target. Make an edge to every label block
896 that has been marked as a potential target for a nonlocal
897 goto. */
898 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
900 make_edge (bb, target_bb, EDGE_ABNORMAL);
901 break;
906 /* Degenerate case of computed goto with no labels. */
907 if (!for_call && EDGE_COUNT (bb->succs) == 0)
908 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
912 /*---------------------------------------------------------------------------
913 Flowgraph analysis
914 ---------------------------------------------------------------------------*/
916 /* Remove unreachable blocks and other miscellaneous clean up work. */
918 bool
919 cleanup_tree_cfg (void)
921 bool retval = false;
923 timevar_push (TV_TREE_CLEANUP_CFG);
925 retval = cleanup_control_flow ();
926 retval |= delete_unreachable_blocks ();
928 /* cleanup_forwarder_blocks can redirect edges out of SWITCH_EXPRs,
929 which can get expensive. So we want to enable recording of edge
930 to CASE_LABEL_EXPR mappings around the call to
931 cleanup_forwarder_blocks. */
932 start_recording_case_labels ();
933 retval |= cleanup_forwarder_blocks ();
934 end_recording_case_labels ();
936 #ifdef ENABLE_CHECKING
937 if (retval)
939 gcc_assert (!cleanup_control_flow ());
940 gcc_assert (!delete_unreachable_blocks ());
941 gcc_assert (!cleanup_forwarder_blocks ());
943 #endif
945 /* Merging the blocks creates no new opportunities for the other
946 optimizations, so do it here. */
947 retval |= merge_seq_blocks ();
949 compact_blocks ();
951 #ifdef ENABLE_CHECKING
952 verify_flow_info ();
953 #endif
954 timevar_pop (TV_TREE_CLEANUP_CFG);
955 return retval;
959 /* Cleanup cfg and repair loop structures. */
961 void
962 cleanup_tree_cfg_loop (void)
964 bitmap changed_bbs = BITMAP_ALLOC (NULL);
966 cleanup_tree_cfg ();
968 fix_loop_structure (current_loops, changed_bbs);
969 calculate_dominance_info (CDI_DOMINATORS);
971 /* This usually does nothing. But sometimes parts of the CFG that originally
972 were inside a loop end up outside of it due to edge removal (since they
973 become unreachable through back edges from the latch). */
974 rewrite_into_loop_closed_ssa (changed_bbs);
976 BITMAP_FREE (changed_bbs);
978 #ifdef ENABLE_CHECKING
979 verify_loop_structure (current_loops);
980 #endif
983 /* Cleanup useless labels in basic blocks. This is something we wish
984 to do early because it allows us to group case labels before creating
985 the edges for the CFG, and it speeds up block statement iterators in
986 all passes later on.
987 We only run this pass once; running it more than once is probably not
988 profitable.
990 /* A map from basic block index to the leading label of that block. */
991 static tree *label_for_bb;
993 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
994 static void
995 update_eh_label (struct eh_region *region)
997 tree old_label = get_eh_region_tree_label (region);
998 if (old_label)
1000 tree new_label;
1001 basic_block bb = label_to_block (old_label);
1003 /* ??? After optimizing, there may be EH regions with labels
1004 that have already been removed from the function body, so
1005 there is no basic block for them. */
1006 if (! bb)
1007 return;
1009 new_label = label_for_bb[bb->index];
1010 set_eh_region_tree_label (region, new_label);
1014 /* Given LABEL return the first label in the same basic block. */
1015 static tree
1016 main_block_label (tree label)
1018 basic_block bb = label_to_block (label);
1020 /* label_to_block possibly inserted undefined label into the chain. */
1021 if (!label_for_bb[bb->index])
1022 label_for_bb[bb->index] = label;
1023 return label_for_bb[bb->index];
1026 /* Cleanup redundant labels. This is a three-step process:
1027 1) Find the leading label for each block.
1028 2) Redirect all references to labels to the leading labels.
1029 3) Cleanup all useless labels. */
1031 void
1032 cleanup_dead_labels (void)
1034 basic_block bb;
1035 label_for_bb = xcalloc (last_basic_block, sizeof (tree));
1037 /* Find a suitable label for each block. We use the first user-defined
1038 label if there is one, or otherwise just the first label we see. */
1039 FOR_EACH_BB (bb)
1041 block_stmt_iterator i;
1043 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
1045 tree label, stmt = bsi_stmt (i);
1047 if (TREE_CODE (stmt) != LABEL_EXPR)
1048 break;
1050 label = LABEL_EXPR_LABEL (stmt);
1052 /* If we have not yet seen a label for the current block,
1053 remember this one and see if there are more labels. */
1054 if (! label_for_bb[bb->index])
1056 label_for_bb[bb->index] = label;
1057 continue;
1060 /* If we did see a label for the current block already, but it
1061 is an artificially created label, replace it if the current
1062 label is a user defined label. */
1063 if (! DECL_ARTIFICIAL (label)
1064 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
1066 label_for_bb[bb->index] = label;
1067 break;
1072 /* Now redirect all jumps/branches to the selected label.
1073 First do so for each block ending in a control statement. */
1074 FOR_EACH_BB (bb)
1076 tree stmt = last_stmt (bb);
1077 if (!stmt)
1078 continue;
1080 switch (TREE_CODE (stmt))
1082 case COND_EXPR:
1084 tree true_branch, false_branch;
1086 true_branch = COND_EXPR_THEN (stmt);
1087 false_branch = COND_EXPR_ELSE (stmt);
1089 GOTO_DESTINATION (true_branch)
1090 = main_block_label (GOTO_DESTINATION (true_branch));
1091 GOTO_DESTINATION (false_branch)
1092 = main_block_label (GOTO_DESTINATION (false_branch));
1094 break;
1097 case SWITCH_EXPR:
1099 size_t i;
1100 tree vec = SWITCH_LABELS (stmt);
1101 size_t n = TREE_VEC_LENGTH (vec);
1103 /* Replace all destination labels. */
1104 for (i = 0; i < n; ++i)
1106 tree elt = TREE_VEC_ELT (vec, i);
1107 tree label = main_block_label (CASE_LABEL (elt));
1108 CASE_LABEL (elt) = label;
1110 break;
1113 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1114 remove them until after we've created the CFG edges. */
1115 case GOTO_EXPR:
1116 if (! computed_goto_p (stmt))
1118 GOTO_DESTINATION (stmt)
1119 = main_block_label (GOTO_DESTINATION (stmt));
1120 break;
1123 default:
1124 break;
1128 for_each_eh_region (update_eh_label);
1130 /* Finally, purge dead labels. All user-defined labels and labels that
1131 can be the target of non-local gotos are preserved. */
1132 FOR_EACH_BB (bb)
1134 block_stmt_iterator i;
1135 tree label_for_this_bb = label_for_bb[bb->index];
1137 if (! label_for_this_bb)
1138 continue;
1140 for (i = bsi_start (bb); !bsi_end_p (i); )
1142 tree label, stmt = bsi_stmt (i);
1144 if (TREE_CODE (stmt) != LABEL_EXPR)
1145 break;
1147 label = LABEL_EXPR_LABEL (stmt);
1149 if (label == label_for_this_bb
1150 || ! DECL_ARTIFICIAL (label)
1151 || DECL_NONLOCAL (label))
1152 bsi_next (&i);
1153 else
1154 bsi_remove (&i);
1158 free (label_for_bb);
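/* Source-level sketch (standalone C) of what the three steps above amount
   to: every reference is redirected to one leading label per block, after
   which the remaining labels of that block, if artificial and not otherwise
   used, can be dropped.  The function names are illustrative only.  */
int
labels_before_cleanup (int x)
{
  if (x < 0)
    goto first;
  if (x == 0)
    goto second;                /* second label of the same block */
  return x;
 first:
 second:
  return -x;
}

int
labels_after_cleanup (int x)
{
  if (x < 0)
    goto first;
  if (x == 0)
    goto first;                 /* redirected to the leading label */
  return x;
 first:
  return -x;
}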
1161 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1162 and scan the sorted vector of cases. Combine the ones jumping to the
1163 same label.
1164 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
1166 void
1167 group_case_labels (void)
1169 basic_block bb;
1171 FOR_EACH_BB (bb)
1173 tree stmt = last_stmt (bb);
1174 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1176 tree labels = SWITCH_LABELS (stmt);
1177 int old_size = TREE_VEC_LENGTH (labels);
1178 int i, j, new_size = old_size;
1179 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1180 tree default_label;
1182 /* The default label is always the last case in a switch
1183 statement after gimplification. */
1184 default_label = CASE_LABEL (default_case);
1186 /* Look for possible opportunities to merge cases.
1187 Ignore the last element of the label vector because it
1188 must be the default case. */
1189 i = 0;
1190 while (i < old_size - 1)
1192 tree base_case, base_label, base_high, type;
1193 base_case = TREE_VEC_ELT (labels, i);
1195 gcc_assert (base_case);
1196 base_label = CASE_LABEL (base_case);
1198 /* Discard cases that have the same destination as the
1199 default case. */
1200 if (base_label == default_label)
1202 TREE_VEC_ELT (labels, i) = NULL_TREE;
1203 i++;
1204 new_size--;
1205 continue;
1208 type = TREE_TYPE (CASE_LOW (base_case));
1209 base_high = CASE_HIGH (base_case) ?
1210 CASE_HIGH (base_case) : CASE_LOW (base_case);
1211 i++;
1212 /* Try to merge case labels. Break out when we reach the end
1213 of the label vector or when we cannot merge the next case
1214 label with the current one. */
1215 while (i < old_size - 1)
1217 tree merge_case = TREE_VEC_ELT (labels, i);
1218 tree merge_label = CASE_LABEL (merge_case);
1219 tree t = int_const_binop (PLUS_EXPR, base_high,
1220 integer_one_node, 1);
1222 /* Merge the cases if they jump to the same place,
1223 and their ranges are consecutive. */
1224 if (merge_label == base_label
1225 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1227 base_high = CASE_HIGH (merge_case) ?
1228 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1229 CASE_HIGH (base_case) = base_high;
1230 TREE_VEC_ELT (labels, i) = NULL_TREE;
1231 new_size--;
1232 i++;
1234 else
1235 break;
1239 /* Compress the case labels in the label vector, and adjust the
1240 length of the vector. */
1241 for (i = 0, j = 0; i < new_size; i++)
1243 while (! TREE_VEC_ELT (labels, j))
1244 j++;
1245 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1247 TREE_VEC_LENGTH (labels) = new_size;
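/* Standalone sketch of the merging rule: consecutive case ranges that
   branch to the same destination collapse into one range.  Plain ints
   stand in for the CASE_LOW/CASE_HIGH trees and for the labels, and the
   default case is assumed to have been filtered out already.  */
#include <stdio.h>

struct case_range
{
  int low, high;                /* inclusive range; high == low for a
                                   single value */
  int label;                    /* destination, compared like CASE_LABEL */
};

/* Merge adjacent entries in place and return the new length, mirroring
   the inner loop of group_case_labels.  LABELS must be sorted by LOW.  */
static int
group_cases (struct case_range *labels, int n)
{
  int out = 0;
  for (int i = 0; i < n; )
    {
      struct case_range base = labels[i++];
      while (i < n
             && labels[i].label == base.label
             && labels[i].low == base.high + 1)
        base.high = labels[i++].high;
      labels[out++] = base;
    }
  return out;
}

int
main (void)
{
  /* "case 1: case 2: case 3:" all branching to L7, plus "case 5:" to L9.  */
  struct case_range v[] = { {1, 1, 7}, {2, 2, 7}, {3, 3, 7}, {5, 5, 9} };
  int n = group_cases (v, 4);

  for (int i = 0; i < n; i++)
    printf ("%d..%d -> L%d\n", v[i].low, v[i].high, v[i].label);
  /* Prints "1..3 -> L7" and "5..5 -> L9".  */
  return 0;
}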
1252 /* Checks whether we can merge block B into block A. */
1254 static bool
1255 tree_can_merge_blocks_p (basic_block a, basic_block b)
1257 tree stmt;
1258 block_stmt_iterator bsi;
1260 if (!single_succ_p (a))
1261 return false;
1263 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1264 return false;
1266 if (single_succ (a) != b)
1267 return false;
1269 if (!single_pred_p (b))
1270 return false;
1272 if (b == EXIT_BLOCK_PTR)
1273 return false;
1275 /* If A ends by a statement causing exceptions or something similar, we
1276 cannot merge the blocks. */
1277 stmt = last_stmt (a);
1278 if (stmt && stmt_ends_bb_p (stmt))
1279 return false;
1281 /* Do not allow a block with only a non-local label to be merged. */
1282 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1283 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1284 return false;
1286 /* There may be no phi nodes at the start of b. Most of these degenerate
1287 phi nodes should be cleaned up by kill_redundant_phi_nodes. */
1288 if (phi_nodes (b))
1289 return false;
1291 /* Do not remove user labels. */
1292 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1294 stmt = bsi_stmt (bsi);
1295 if (TREE_CODE (stmt) != LABEL_EXPR)
1296 break;
1297 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1298 return false;
1301 /* Protect the loop latches. */
1302 if (current_loops
1303 && b->loop_father->latch == b)
1304 return false;
1306 return true;
1310 /* Merge block B into block A. */
1312 static void
1313 tree_merge_blocks (basic_block a, basic_block b)
1315 block_stmt_iterator bsi;
1316 tree_stmt_iterator last;
1318 if (dump_file)
1319 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1321 /* Ensure that B follows A. */
1322 move_block_after (b, a);
1324 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1325 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1327 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1328 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1330 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1332 tree label = bsi_stmt (bsi);
1334 bsi_remove (&bsi);
1335 /* Now that we can thread computed gotos, we might have
1336 a situation where we have a forced label in block B.
1337 However, the label at the start of block B might still be
1338 used in other ways (think about the runtime checking for
1339 Fortran assigned gotos). So we cannot just delete the
1340 label. Instead we move the label to the start of block A. */
1341 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1343 block_stmt_iterator dest_bsi = bsi_start (a);
1344 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1347 else
1349 set_bb_for_stmt (bsi_stmt (bsi), a);
1350 bsi_next (&bsi);
1354 /* Merge the chains. */
1355 last = tsi_last (a->stmt_list);
1356 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1357 b->stmt_list = NULL;
1361 /* Walk the function tree removing unnecessary statements.
1363 * Empty statement nodes are removed
1365 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1367 * Unnecessary COND_EXPRs are removed
1369 * Some unnecessary BIND_EXPRs are removed
1371 Clearly more work could be done. The trick is doing the analysis
1372 and removal fast enough to be a net improvement in compile times.
1374 Note that when we remove a control structure such as a COND_EXPR,
1375 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1376 to ensure we eliminate all the useless code. */
1378 struct rus_data
1380 tree *last_goto;
1381 bool repeat;
1382 bool may_throw;
1383 bool may_branch;
1384 bool has_label;
1387 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1389 static bool
1390 remove_useless_stmts_warn_notreached (tree stmt)
1392 if (EXPR_HAS_LOCATION (stmt))
1394 location_t loc = EXPR_LOCATION (stmt);
1395 if (LOCATION_LINE (loc) > 0)
1397 warning ("%Hwill never be executed", &loc);
1398 return true;
1402 switch (TREE_CODE (stmt))
1404 case STATEMENT_LIST:
1406 tree_stmt_iterator i;
1407 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1408 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1409 return true;
1411 break;
1413 case COND_EXPR:
1414 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1415 return true;
1416 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1417 return true;
1418 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1419 return true;
1420 break;
1422 case TRY_FINALLY_EXPR:
1423 case TRY_CATCH_EXPR:
1424 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1425 return true;
1426 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1427 return true;
1428 break;
1430 case CATCH_EXPR:
1431 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1432 case EH_FILTER_EXPR:
1433 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1434 case BIND_EXPR:
1435 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1437 default:
1438 /* Not a live container. */
1439 break;
1442 return false;
1445 static void
1446 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1448 tree then_clause, else_clause, cond;
1449 bool save_has_label, then_has_label, else_has_label;
1451 save_has_label = data->has_label;
1452 data->has_label = false;
1453 data->last_goto = NULL;
1455 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1457 then_has_label = data->has_label;
1458 data->has_label = false;
1459 data->last_goto = NULL;
1461 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1463 else_has_label = data->has_label;
1464 data->has_label = save_has_label | then_has_label | else_has_label;
1466 then_clause = COND_EXPR_THEN (*stmt_p);
1467 else_clause = COND_EXPR_ELSE (*stmt_p);
1468 cond = fold (COND_EXPR_COND (*stmt_p));
1470 /* If neither arm does anything at all, we can remove the whole IF. */
1471 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1473 *stmt_p = build_empty_stmt ();
1474 data->repeat = true;
1477 /* If there are no reachable statements in an arm, then we can
1478 zap the entire conditional. */
1479 else if (integer_nonzerop (cond) && !else_has_label)
1481 if (warn_notreached)
1482 remove_useless_stmts_warn_notreached (else_clause);
1483 *stmt_p = then_clause;
1484 data->repeat = true;
1486 else if (integer_zerop (cond) && !then_has_label)
1488 if (warn_notreached)
1489 remove_useless_stmts_warn_notreached (then_clause);
1490 *stmt_p = else_clause;
1491 data->repeat = true;
1494 /* Check a couple of simple things on then/else with single stmts. */
1495 else
1497 tree then_stmt = expr_only (then_clause);
1498 tree else_stmt = expr_only (else_clause);
1500 /* Notice branches to a common destination. */
1501 if (then_stmt && else_stmt
1502 && TREE_CODE (then_stmt) == GOTO_EXPR
1503 && TREE_CODE (else_stmt) == GOTO_EXPR
1504 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1506 *stmt_p = then_stmt;
1507 data->repeat = true;
1510 /* If the THEN/ELSE clause merely assigns a value to a variable or
1511 parameter which is already known to contain that value, then
1512 remove the useless THEN/ELSE clause. */
1513 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1515 if (else_stmt
1516 && TREE_CODE (else_stmt) == MODIFY_EXPR
1517 && TREE_OPERAND (else_stmt, 0) == cond
1518 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1519 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1521 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1522 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1523 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1524 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1526 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1527 ? then_stmt : else_stmt);
1528 tree *location = (TREE_CODE (cond) == EQ_EXPR
1529 ? &COND_EXPR_THEN (*stmt_p)
1530 : &COND_EXPR_ELSE (*stmt_p));
1532 if (stmt
1533 && TREE_CODE (stmt) == MODIFY_EXPR
1534 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1535 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1536 *location = alloc_stmt_list ();
1540 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1541 would be re-introduced during lowering. */
1542 data->last_goto = NULL;
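/* Source-level sketch (standalone C) of two of the simplifications above;
   the function names are illustrative only.  */

/* Both arms are plain gotos to one label, so the whole conditional
   reduces to a single goto.  */
int
common_destination (int c, int v)
{
  if (c)
    goto done;
  else
    goto done;
 done:
  return v;
}

/* The THEN arm only stores the value the condition already proves X to
   hold, so the arm is emptied.  */
void
redundant_store (int *p, int x)
{
  if (x == 0)
    x = 0;
  *p = x;
}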
1546 static void
1547 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1549 bool save_may_branch, save_may_throw;
1550 bool this_may_branch, this_may_throw;
1552 /* Collect may_branch and may_throw information for the body only. */
1553 save_may_branch = data->may_branch;
1554 save_may_throw = data->may_throw;
1555 data->may_branch = false;
1556 data->may_throw = false;
1557 data->last_goto = NULL;
1559 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1561 this_may_branch = data->may_branch;
1562 this_may_throw = data->may_throw;
1563 data->may_branch |= save_may_branch;
1564 data->may_throw |= save_may_throw;
1565 data->last_goto = NULL;
1567 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1569 /* If the body is empty, then we can emit the FINALLY block without
1570 the enclosing TRY_FINALLY_EXPR. */
1571 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1573 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1574 data->repeat = true;
1577 /* If the handler is empty, then we can emit the TRY block without
1578 the enclosing TRY_FINALLY_EXPR. */
1579 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1581 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1582 data->repeat = true;
1585 /* If the body neither throws, nor branches, then we can safely
1586 string the TRY and FINALLY blocks together. */
1587 else if (!this_may_branch && !this_may_throw)
1589 tree stmt = *stmt_p;
1590 *stmt_p = TREE_OPERAND (stmt, 0);
1591 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1592 data->repeat = true;
1597 static void
1598 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1600 bool save_may_throw, this_may_throw;
1601 tree_stmt_iterator i;
1602 tree stmt;
1604 /* Collect may_throw information for the body only. */
1605 save_may_throw = data->may_throw;
1606 data->may_throw = false;
1607 data->last_goto = NULL;
1609 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1611 this_may_throw = data->may_throw;
1612 data->may_throw = save_may_throw;
1614 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1615 if (!this_may_throw)
1617 if (warn_notreached)
1618 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1619 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1620 data->repeat = true;
1621 return;
1624 /* Process the catch clause specially. We may be able to tell that
1625 no exceptions propagate past this point. */
1627 this_may_throw = true;
1628 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1629 stmt = tsi_stmt (i);
1630 data->last_goto = NULL;
1632 switch (TREE_CODE (stmt))
1634 case CATCH_EXPR:
1635 for (; !tsi_end_p (i); tsi_next (&i))
1637 stmt = tsi_stmt (i);
1638 /* If we catch all exceptions, then the body does not
1639 propagate exceptions past this point. */
1640 if (CATCH_TYPES (stmt) == NULL)
1641 this_may_throw = false;
1642 data->last_goto = NULL;
1643 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1645 break;
1647 case EH_FILTER_EXPR:
1648 if (EH_FILTER_MUST_NOT_THROW (stmt))
1649 this_may_throw = false;
1650 else if (EH_FILTER_TYPES (stmt) == NULL)
1651 this_may_throw = false;
1652 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1653 break;
1655 default:
1656 /* Otherwise this is a cleanup. */
1657 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1659 /* If the cleanup is empty, then we can emit the TRY block without
1660 the enclosing TRY_CATCH_EXPR. */
1661 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1663 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1664 data->repeat = true;
1666 break;
1668 data->may_throw |= this_may_throw;
1672 static void
1673 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1675 tree block;
1677 /* First remove anything underneath the BIND_EXPR. */
1678 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1680 /* If the BIND_EXPR has no variables, then we can pull everything
1681 up one level and remove the BIND_EXPR, unless this is the toplevel
1682 BIND_EXPR for the current function or an inlined function.
1684 When this situation occurs we will want to apply this
1685 optimization again. */
1686 block = BIND_EXPR_BLOCK (*stmt_p);
1687 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1688 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1689 && (! block
1690 || ! BLOCK_ABSTRACT_ORIGIN (block)
1691 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1692 != FUNCTION_DECL)))
1694 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1695 data->repeat = true;
1700 static void
1701 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1703 tree dest = GOTO_DESTINATION (*stmt_p);
1705 data->may_branch = true;
1706 data->last_goto = NULL;
1708 /* Record the last goto expr, so that we can delete it if unnecessary. */
1709 if (TREE_CODE (dest) == LABEL_DECL)
1710 data->last_goto = stmt_p;
1714 static void
1715 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1717 tree label = LABEL_EXPR_LABEL (*stmt_p);
1719 data->has_label = true;
1721 /* We do want to jump across non-local label receiver code. */
1722 if (DECL_NONLOCAL (label))
1723 data->last_goto = NULL;
1725 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1727 *data->last_goto = build_empty_stmt ();
1728 data->repeat = true;
1731 /* ??? Add something here to delete unused labels. */
1735 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1736 decl. This allows us to eliminate redundant or useless
1737 calls to "const" functions.
1739 The gimplifier already does the same operation, but we may only notice that
1740 a function is const or pure after its calls have been gimplified, so we need
1741 to update the flags here. */
1743 static void
1744 update_call_expr_flags (tree call)
1746 tree decl = get_callee_fndecl (call);
1747 if (!decl)
1748 return;
1749 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1750 TREE_SIDE_EFFECTS (call) = 0;
1751 if (TREE_NOTHROW (decl))
1752 TREE_NOTHROW (call) = 1;
1756 /* T is a CALL_EXPR. Set current_function_calls_* flags. */
1758 void
1759 notice_special_calls (tree t)
1761 int flags = call_expr_flags (t);
1763 if (flags & ECF_MAY_BE_ALLOCA)
1764 current_function_calls_alloca = true;
1765 if (flags & ECF_RETURNS_TWICE)
1766 current_function_calls_setjmp = true;
1770 /* Clear flags set by notice_special_calls. Used by dead code removal
1771 to update the flags. */
1773 void
1774 clear_special_calls (void)
1776 current_function_calls_alloca = false;
1777 current_function_calls_setjmp = false;
1781 static void
1782 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1784 tree t = *tp, op;
1786 switch (TREE_CODE (t))
1788 case COND_EXPR:
1789 remove_useless_stmts_cond (tp, data);
1790 break;
1792 case TRY_FINALLY_EXPR:
1793 remove_useless_stmts_tf (tp, data);
1794 break;
1796 case TRY_CATCH_EXPR:
1797 remove_useless_stmts_tc (tp, data);
1798 break;
1800 case BIND_EXPR:
1801 remove_useless_stmts_bind (tp, data);
1802 break;
1804 case GOTO_EXPR:
1805 remove_useless_stmts_goto (tp, data);
1806 break;
1808 case LABEL_EXPR:
1809 remove_useless_stmts_label (tp, data);
1810 break;
1812 case RETURN_EXPR:
1813 fold_stmt (tp);
1814 data->last_goto = NULL;
1815 data->may_branch = true;
1816 break;
1818 case CALL_EXPR:
1819 fold_stmt (tp);
1820 data->last_goto = NULL;
1821 notice_special_calls (t);
1822 update_call_expr_flags (t);
1823 if (tree_could_throw_p (t))
1824 data->may_throw = true;
1825 break;
1827 case MODIFY_EXPR:
1828 data->last_goto = NULL;
1829 fold_stmt (tp);
1830 op = get_call_expr_in (t);
1831 if (op)
1833 update_call_expr_flags (op);
1834 notice_special_calls (op);
1836 if (tree_could_throw_p (t))
1837 data->may_throw = true;
1838 break;
1840 case STATEMENT_LIST:
1842 tree_stmt_iterator i = tsi_start (t);
1843 while (!tsi_end_p (i))
1845 t = tsi_stmt (i);
1846 if (IS_EMPTY_STMT (t))
1848 tsi_delink (&i);
1849 continue;
1852 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1854 t = tsi_stmt (i);
1855 if (TREE_CODE (t) == STATEMENT_LIST)
1857 tsi_link_before (&i, t, TSI_SAME_STMT);
1858 tsi_delink (&i);
1860 else
1861 tsi_next (&i);
1864 break;
1865 case ASM_EXPR:
1866 fold_stmt (tp);
1867 data->last_goto = NULL;
1868 break;
1870 default:
1871 data->last_goto = NULL;
1872 break;
1876 static void
1877 remove_useless_stmts (void)
1879 struct rus_data data;
1881 clear_special_calls ();
1885 memset (&data, 0, sizeof (data));
1886 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1888 while (data.repeat);
1892 struct tree_opt_pass pass_remove_useless_stmts =
1894 "useless", /* name */
1895 NULL, /* gate */
1896 remove_useless_stmts, /* execute */
1897 NULL, /* sub */
1898 NULL, /* next */
1899 0, /* static_pass_number */
1900 0, /* tv_id */
1901 PROP_gimple_any, /* properties_required */
1902 0, /* properties_provided */
1903 0, /* properties_destroyed */
1904 0, /* todo_flags_start */
1905 TODO_dump_func, /* todo_flags_finish */
1906 0 /* letter */
1910 /* Remove obviously useless statements in basic block BB. */
1912 static void
1913 cfg_remove_useless_stmts_bb (basic_block bb)
1915 block_stmt_iterator bsi;
1916 tree stmt = NULL_TREE;
1917 tree cond, var = NULL_TREE, val = NULL_TREE;
1918 struct var_ann_d *ann;
1920 /* Check whether we come here from a condition, and if so, get the
1921 condition. */
1922 if (!single_pred_p (bb)
1923 || !(single_pred_edge (bb)->flags
1924 & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
1925 return;
1927 cond = COND_EXPR_COND (last_stmt (single_pred (bb)));
1929 if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1931 var = cond;
1932 val = (single_pred_edge (bb)->flags & EDGE_FALSE_VALUE
1933 ? boolean_false_node : boolean_true_node);
1935 else if (TREE_CODE (cond) == TRUTH_NOT_EXPR
1936 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1937 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL))
1939 var = TREE_OPERAND (cond, 0);
1940 val = (single_pred_edge (bb)->flags & EDGE_FALSE_VALUE
1941 ? boolean_true_node : boolean_false_node);
1943 else
1945 if (single_pred_edge (bb)->flags & EDGE_FALSE_VALUE)
1946 cond = invert_truthvalue (cond);
1947 if (TREE_CODE (cond) == EQ_EXPR
1948 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1949 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1950 && (TREE_CODE (TREE_OPERAND (cond, 1)) == VAR_DECL
1951 || TREE_CODE (TREE_OPERAND (cond, 1)) == PARM_DECL
1952 || TREE_CONSTANT (TREE_OPERAND (cond, 1))))
1954 var = TREE_OPERAND (cond, 0);
1955 val = TREE_OPERAND (cond, 1);
1957 else
1958 return;
1961 /* Only work for normal local variables. */
1962 ann = var_ann (var);
1963 if (!ann
1964 || ann->may_aliases
1965 || TREE_ADDRESSABLE (var))
1966 return;
1968 if (! TREE_CONSTANT (val))
1970 ann = var_ann (val);
1971 if (!ann
1972 || ann->may_aliases
1973 || TREE_ADDRESSABLE (val))
1974 return;
1977 /* Ignore floating point variables, since comparisons behave oddly for
1978 them. */
1979 if (FLOAT_TYPE_P (TREE_TYPE (var)))
1980 return;
1982 for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
1984 stmt = bsi_stmt (bsi);
1986 /* If the THEN/ELSE clause merely assigns a value to a variable/parameter
1987 which is already known to contain that value, then remove the useless
1988 THEN/ELSE clause. */
1989 if (TREE_CODE (stmt) == MODIFY_EXPR
1990 && TREE_OPERAND (stmt, 0) == var
1991 && operand_equal_p (val, TREE_OPERAND (stmt, 1), 0))
1993 bsi_remove (&bsi);
1994 continue;
1997 /* Invalidate the var if we encounter something that could modify it.
1998 Likewise for the value it was previously set to. Note that we only
1999 consider values that are either a VAR_DECL or PARM_DECL so we
2000 can test for conflict very simply. */
2001 if (TREE_CODE (stmt) == ASM_EXPR
2002 || (TREE_CODE (stmt) == MODIFY_EXPR
2003 && (TREE_OPERAND (stmt, 0) == var
2004 || TREE_OPERAND (stmt, 0) == val)))
2005 return;
2007 bsi_next (&bsi);
2012 /* A CFG-aware version of remove_useless_stmts. */
2014 void
2015 cfg_remove_useless_stmts (void)
2017 basic_block bb;
2019 #ifdef ENABLE_CHECKING
2020 verify_flow_info ();
2021 #endif
2023 FOR_EACH_BB (bb)
2025 cfg_remove_useless_stmts_bb (bb);
2030 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2032 static void
2033 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2035 tree phi;
2037 /* Since this block is no longer reachable, we can just delete all
2038 of its PHI nodes. */
2039 phi = phi_nodes (bb);
2040 while (phi)
2042 tree next = PHI_CHAIN (phi);
2043 remove_phi_node (phi, NULL_TREE);
2044 phi = next;
2047 /* Remove edges to BB's successors. */
2048 while (EDGE_COUNT (bb->succs) > 0)
2049 remove_edge (EDGE_SUCC (bb, 0));
2053 /* Remove statements of basic block BB. */
2055 static void
2056 remove_bb (basic_block bb)
2058 block_stmt_iterator i;
2059 #ifdef USE_MAPPED_LOCATION
2060 source_location loc = UNKNOWN_LOCATION;
2061 #else
2062 source_locus loc = 0;
2063 #endif
2065 if (dump_file)
2067 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2068 if (dump_flags & TDF_DETAILS)
2070 dump_bb (bb, dump_file, 0);
2071 fprintf (dump_file, "\n");
2075 /* If we remove the header or the latch of a loop, mark the loop for
2076 removal by setting its header and latch to NULL. */
2077 if (current_loops)
2079 struct loop *loop = bb->loop_father;
2081 if (loop->latch == bb
2082 || loop->header == bb)
2084 loop->latch = NULL;
2085 loop->header = NULL;
2089 /* Remove all the instructions in the block. */
2090 for (i = bsi_start (bb); !bsi_end_p (i);)
2092 tree stmt = bsi_stmt (i);
2093 if (TREE_CODE (stmt) == LABEL_EXPR
2094 && FORCED_LABEL (LABEL_EXPR_LABEL (stmt)))
2096 basic_block new_bb = bb->prev_bb;
2097 block_stmt_iterator new_bsi = bsi_start (new_bb);
2099 bsi_remove (&i);
2100 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2102 else
2104 release_defs (stmt);
2106 set_bb_for_stmt (stmt, NULL);
2107 bsi_remove (&i);
2110 /* Don't warn for removed gotos. Gotos are often removed due to
2111 jump threading, thus resulting in bogus warnings. Not great,
2112 since this way we lose warnings for gotos in the original
2113 program that are indeed unreachable. */
2114 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2116 #ifdef USE_MAPPED_LOCATION
2117 if (EXPR_HAS_LOCATION (stmt))
2118 loc = EXPR_LOCATION (stmt);
2119 #else
2120 source_locus t;
2121 t = EXPR_LOCUS (stmt);
2122 if (t && LOCATION_LINE (*t) > 0)
2123 loc = t;
2124 #endif
2128 /* If requested, give a warning that the first statement in the
2129 block is unreachable. The loop above records only the first
2130 location it finds (ignoring gotos), so LOC refers to the earliest
2131 remaining statement in the block. */
2132 #ifdef USE_MAPPED_LOCATION
2133 if (warn_notreached && loc != UNKNOWN_LOCATION)
2134 warning ("%Hwill never be executed", &loc);
2135 #else
2136 if (warn_notreached && loc)
2137 warning ("%Hwill never be executed", loc);
2138 #endif
2140 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2143 /* A list of all the noreturn calls passed to modify_stmt.
2144 cleanup_control_flow uses it to detect cases where a mid-block
2145 indirect call has been turned into a noreturn call. When this
2146 happens, all the instructions after the call are no longer
2147 reachable and must be deleted as dead. */
2149 VEC(tree) *modified_noreturn_calls;
2151 /* Try to remove superfluous control structures. */
2153 static bool
2154 cleanup_control_flow (void)
2156 basic_block bb;
2157 block_stmt_iterator bsi;
2158 bool retval = false;
2159 tree stmt;
2161 /* Detect cases where a mid-block call is now known not to return. */
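/* Splitting the block right after such a call moves the trailing,
   now-unreachable statements into a block of their own.  The
   noreturn_call_p check near the end of this function then removes the
   fallthru edge out of the call's block, so the orphaned block can be
   cleaned up as unreachable.  */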
2162 while (VEC_length (tree, modified_noreturn_calls))
2164 stmt = VEC_pop (tree, modified_noreturn_calls);
2165 bb = bb_for_stmt (stmt);
2166 if (bb != NULL && last_stmt (bb) != stmt && noreturn_call_p (stmt))
2167 split_block (bb, stmt);
2170 FOR_EACH_BB (bb)
2172 bsi = bsi_last (bb);
2174 if (bsi_end_p (bsi))
2175 continue;
2177 stmt = bsi_stmt (bsi);
2178 if (TREE_CODE (stmt) == COND_EXPR
2179 || TREE_CODE (stmt) == SWITCH_EXPR)
2180 retval |= cleanup_control_expr_graph (bb, bsi);
2182 /* If we had a computed goto which has a compile-time determinable
2183 destination, then we can eliminate the goto. */
2184 if (TREE_CODE (stmt) == GOTO_EXPR
2185 && TREE_CODE (GOTO_DESTINATION (stmt)) == ADDR_EXPR
2186 && TREE_CODE (TREE_OPERAND (GOTO_DESTINATION (stmt), 0)) == LABEL_DECL)
2188 edge e;
2189 tree label;
2190 edge_iterator ei;
2191 basic_block target_block;
2192 bool removed_edge = false;
2194 /* First look at all the outgoing edges. Delete any outgoing
2195 edges which do not go to the right block. For the one
2196 edge which goes to the right block, fix up its flags. */
2197 label = TREE_OPERAND (GOTO_DESTINATION (stmt), 0);
2198 target_block = label_to_block (label);
2199 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2201 if (e->dest != target_block)
2203 removed_edge = true;
2204 remove_edge (e);
2206 else
2208 /* Turn off the EDGE_ABNORMAL flag. */
2209 e->flags &= ~EDGE_ABNORMAL;
2211 /* And set EDGE_FALLTHRU. */
2212 e->flags |= EDGE_FALLTHRU;
2213 ei_next (&ei);
2217 /* If we removed one or more edges, then we will need to fix the
2218 dominators. It may be possible to incrementally update them. */
2219 if (removed_edge)
2220 free_dominance_info (CDI_DOMINATORS);
2222 /* Remove the GOTO_EXPR as it is not needed. The CFG has all the
2223 relevant information we need. */
2224 bsi_remove (&bsi);
2225 retval = true;
2228 /* Check for indirect calls that have been turned into
2229 noreturn calls. */
2230 if (noreturn_call_p (stmt) && remove_fallthru_edge (bb->succs))
2232 free_dominance_info (CDI_DOMINATORS);
2233 retval = true;
2236 return retval;
2240 /* Disconnect an unreachable block in the control expression starting
2241 at block BB. */
2243 static bool
2244 cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
2246 edge taken_edge;
2247 bool retval = false;
2248 tree expr = bsi_stmt (bsi), val;
2250 if (!single_succ_p (bb))
2252 edge e;
2253 edge_iterator ei;
2255 switch (TREE_CODE (expr))
2257 case COND_EXPR:
2258 val = COND_EXPR_COND (expr);
2259 break;
2261 case SWITCH_EXPR:
2262 val = SWITCH_COND (expr);
2263 if (TREE_CODE (val) != INTEGER_CST)
2264 return false;
2265 break;
2267 default:
2268 gcc_unreachable ();
2271 taken_edge = find_taken_edge (bb, val);
2272 if (!taken_edge)
2273 return false;
2275 /* Remove all the edges except the one that is always executed. */
2276 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2278 if (e != taken_edge)
2280 taken_edge->probability += e->probability;
2281 taken_edge->count += e->count;
2282 remove_edge (e);
2283 retval = true;
2285 else
2286 ei_next (&ei);
2288 if (taken_edge->probability > REG_BR_PROB_BASE)
2289 taken_edge->probability = REG_BR_PROB_BASE;
2291 else
2292 taken_edge = single_succ_edge (bb);
2294 bsi_remove (&bsi);
2295 taken_edge->flags = EDGE_FALLTHRU;
2297 /* We removed some paths from the cfg. */
2298 free_dominance_info (CDI_DOMINATORS);
2300 return retval;
2303 /* Remove any fallthru edge from EV. Return true if an edge was removed. */
2305 static bool
2306 remove_fallthru_edge (VEC(edge) *ev)
2308 edge_iterator ei;
2309 edge e;
2311 FOR_EACH_EDGE (e, ei, ev)
2312 if ((e->flags & EDGE_FALLTHRU) != 0)
2314 remove_edge (e);
2315 return true;
2317 return false;
2320 /* Given a basic block BB ending with a control statement (COND_EXPR,
2321 SWITCH_EXPR or a computed GOTO_EXPR) and a predicate VAL, return the
2322 edge that will be taken out of the block. If VAL does not match a unique edge, NULL is returned. */
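/* Note that VAL must be a GIMPLE minimal invariant; for any other value
   this function simply returns NULL, even if the outcome could be
   deduced by other means.  */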
2324 edge
2325 find_taken_edge (basic_block bb, tree val)
2327 tree stmt;
2329 stmt = last_stmt (bb);
2331 gcc_assert (stmt);
2332 gcc_assert (is_ctrl_stmt (stmt));
2333 gcc_assert (val);
2335 if (! is_gimple_min_invariant (val))
2336 return NULL;
2338 if (TREE_CODE (stmt) == COND_EXPR)
2339 return find_taken_edge_cond_expr (bb, val);
2341 if (TREE_CODE (stmt) == SWITCH_EXPR)
2342 return find_taken_edge_switch_expr (bb, val);
2344 if (computed_goto_p (stmt))
2345 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2347 gcc_unreachable ();
2350 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2351 statement, determine which of the outgoing edges will be taken out of the
2352 block. Return NULL if any edge may be taken. */
2354 static edge
2355 find_taken_edge_computed_goto (basic_block bb, tree val)
2357 basic_block dest;
2358 edge e = NULL;
2360 dest = label_to_block (val);
2361 if (dest)
2363 e = find_edge (bb, dest);
2364 gcc_assert (e != NULL);
2367 return e;
2370 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2371 statement, determine which of the two edges will be taken out of the
2372 block. Return NULL if either edge may be taken. */
2374 static edge
2375 find_taken_edge_cond_expr (basic_block bb, tree val)
2377 edge true_edge, false_edge;
2379 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2381 /* Try to determine which branch of the if() will be taken.
2382 If VAL is a constant but it can't be reduced to a 0 or a 1, then
2383 we don't really know which edge will be taken at runtime. This
2384 may happen when comparing addresses (e.g., if (&var1 == 4)). */
2385 if (integer_nonzerop (val))
2386 return true_edge;
2387 else if (integer_zerop (val))
2388 return false_edge;
2390 gcc_unreachable ();
2394 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2395 statement, determine which edge will be taken out of the block. Return
2396 NULL if any edge may be taken. */
2398 static edge
2399 find_taken_edge_switch_expr (basic_block bb, tree val)
2401 tree switch_expr, taken_case;
2402 basic_block dest_bb;
2403 edge e;
2405 switch_expr = last_stmt (bb);
2406 taken_case = find_case_label_for_value (switch_expr, val);
2407 dest_bb = label_to_block (CASE_LABEL (taken_case));
2409 e = find_edge (bb, dest_bb);
2410 gcc_assert (e);
2411 return e;
2415 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2416 We can make optimal use here of the fact that the case labels are
2417 sorted: we can do a binary search for a case matching VAL. */
2419 static tree
2420 find_case_label_for_value (tree switch_expr, tree val)
2422 tree vec = SWITCH_LABELS (switch_expr);
2423 size_t low, high, n = TREE_VEC_LENGTH (vec);
2424 tree default_case = TREE_VEC_ELT (vec, n - 1);
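/* The loop below is a binary search over the sorted labels, excluding
   the default case in the last slot.  LOW is the highest index known to
   have CASE_LOW <= VAL, initially (size_t) -1 meaning "none"; HIGH is an
   exclusive upper bound on the candidates.  With LOW == (size_t) -1 the
   unsigned subtraction HIGH - LOW wraps around to HIGH + 1, so the loop
   condition still does the right thing.  */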
2426 for (low = -1, high = n - 1; high - low > 1; )
2428 size_t i = (high + low) / 2;
2429 tree t = TREE_VEC_ELT (vec, i);
2430 int cmp;
2432 /* Cache the result of comparing CASE_LOW and val. */
2433 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2435 if (cmp > 0)
2436 high = i;
2437 else
2438 low = i;
2440 if (CASE_HIGH (t) == NULL)
2442 /* A single-valued case label. */
2443 if (cmp == 0)
2444 return t;
2446 else
2448 /* A case range. We can only handle integer ranges. */
2449 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2450 return t;
2454 return default_case;
2458 /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
2459 those alternatives are equal in each of the PHI nodes, then return
2460 true, else return false. */
2462 static bool
2463 phi_alternatives_equal (basic_block dest, edge e1, edge e2)
2465 int n1 = e1->dest_idx;
2466 int n2 = e2->dest_idx;
2467 tree phi;
2469 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
2471 tree val1 = PHI_ARG_DEF (phi, n1);
2472 tree val2 = PHI_ARG_DEF (phi, n2);
2474 gcc_assert (val1 != NULL_TREE);
2475 gcc_assert (val2 != NULL_TREE);
2477 if (!operand_equal_for_phi_arg_p (val1, val2))
2478 return false;
2481 return true;
2485 /*---------------------------------------------------------------------------
2486 Debugging functions
2487 ---------------------------------------------------------------------------*/
2489 /* Dump tree-specific information of block BB to file OUTF. */
2491 void
2492 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2494 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2498 /* Dump a basic block on stderr. */
2500 void
2501 debug_tree_bb (basic_block bb)
2503 dump_bb (bb, stderr, 0);
2507 /* Dump basic block with index N on stderr. */
2509 basic_block
2510 debug_tree_bb_n (int n)
2512 debug_tree_bb (BASIC_BLOCK (n));
2513 return BASIC_BLOCK (n);
2517 /* Dump the CFG on stderr.
2519 FLAGS are the same used by the tree dumping functions
2520 (see TDF_* in tree.h). */
2522 void
2523 debug_tree_cfg (int flags)
2525 dump_tree_cfg (stderr, flags);
2529 /* Dump the program showing basic block boundaries on the given FILE.
2531 FLAGS are the same used by the tree dumping functions (see TDF_* in
2532 tree.h). */
2534 void
2535 dump_tree_cfg (FILE *file, int flags)
2537 if (flags & TDF_DETAILS)
2539 const char *funcname
2540 = lang_hooks.decl_printable_name (current_function_decl, 2);
2542 fputc ('\n', file);
2543 fprintf (file, ";; Function %s\n\n", funcname);
2544 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2545 n_basic_blocks, n_edges, last_basic_block);
2547 brief_dump_cfg (file);
2548 fprintf (file, "\n");
2551 if (flags & TDF_STATS)
2552 dump_cfg_stats (file);
2554 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2558 /* Dump CFG statistics on FILE. */
2560 void
2561 dump_cfg_stats (FILE *file)
2563 static long max_num_merged_labels = 0;
2564 unsigned long size, total = 0;
2565 int n_edges;
2566 basic_block bb;
2567 const char * const fmt_str = "%-30s%-13s%12s\n";
2568 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2569 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2570 const char *funcname
2571 = lang_hooks.decl_printable_name (current_function_decl, 2);
2574 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2576 fprintf (file, "---------------------------------------------------------\n");
2577 fprintf (file, fmt_str, "", " Number of ", "Memory");
2578 fprintf (file, fmt_str, "", " instances ", "used ");
2579 fprintf (file, "---------------------------------------------------------\n");
2581 size = n_basic_blocks * sizeof (struct basic_block_def);
2582 total += size;
2583 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2584 SCALE (size), LABEL (size));
2586 n_edges = 0;
2587 FOR_EACH_BB (bb)
2588 n_edges += EDGE_COUNT (bb->succs);
2589 size = n_edges * sizeof (struct edge_def);
2590 total += size;
2591 fprintf (file, fmt_str_1, "Edges", n_edges, SCALE (size), LABEL (size));
2593 size = n_basic_blocks * sizeof (struct bb_ann_d);
2594 total += size;
2595 fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks,
2596 SCALE (size), LABEL (size));
2598 fprintf (file, "---------------------------------------------------------\n");
2599 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2600 LABEL (total));
2601 fprintf (file, "---------------------------------------------------------\n");
2602 fprintf (file, "\n");
2604 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2605 max_num_merged_labels = cfg_stats.num_merged_labels;
2607 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2608 cfg_stats.num_merged_labels, max_num_merged_labels);
2610 fprintf (file, "\n");
2614 /* Dump CFG statistics on stderr. Keep extern so that it's always
2615 linked in the final executable. */
2617 void
2618 debug_cfg_stats (void)
2620 dump_cfg_stats (stderr);
2624 /* Dump the flowgraph to a .vcg FILE. */
2626 static void
2627 tree_cfg2vcg (FILE *file)
2629 edge e;
2630 edge_iterator ei;
2631 basic_block bb;
2632 const char *funcname
2633 = lang_hooks.decl_printable_name (current_function_decl, 2);
2635 /* Write the file header. */
2636 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2637 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2638 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2640 /* Write blocks and edges. */
2641 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2643 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2644 e->dest->index);
2646 if (e->flags & EDGE_FAKE)
2647 fprintf (file, " linestyle: dotted priority: 10");
2648 else
2649 fprintf (file, " linestyle: solid priority: 100");
2651 fprintf (file, " }\n");
2653 fputc ('\n', file);
2655 FOR_EACH_BB (bb)
2657 enum tree_code head_code, end_code;
2658 const char *head_name, *end_name;
2659 int head_line = 0;
2660 int end_line = 0;
2661 tree first = first_stmt (bb);
2662 tree last = last_stmt (bb);
2664 if (first)
2666 head_code = TREE_CODE (first);
2667 head_name = tree_code_name[head_code];
2668 head_line = get_lineno (first);
2670 else
2671 head_name = "no-statement";
2673 if (last)
2675 end_code = TREE_CODE (last);
2676 end_name = tree_code_name[end_code];
2677 end_line = get_lineno (last);
2679 else
2680 end_name = "no-statement";
2682 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2683 bb->index, bb->index, head_name, head_line, end_name,
2684 end_line);
2686 FOR_EACH_EDGE (e, ei, bb->succs)
2688 if (e->dest == EXIT_BLOCK_PTR)
2689 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2690 else
2691 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2693 if (e->flags & EDGE_FAKE)
2694 fprintf (file, " priority: 10 linestyle: dotted");
2695 else
2696 fprintf (file, " priority: 100 linestyle: solid");
2698 fprintf (file, " }\n");
2701 if (bb->next_bb != EXIT_BLOCK_PTR)
2702 fputc ('\n', file);
2705 fputs ("}\n\n", file);
2710 /*---------------------------------------------------------------------------
2711 Miscellaneous helpers
2712 ---------------------------------------------------------------------------*/
2714 /* Return true if T represents a stmt that always transfers control. */
2716 bool
2717 is_ctrl_stmt (tree t)
2719 return (TREE_CODE (t) == COND_EXPR
2720 || TREE_CODE (t) == SWITCH_EXPR
2721 || TREE_CODE (t) == GOTO_EXPR
2722 || TREE_CODE (t) == RETURN_EXPR
2723 || TREE_CODE (t) == RESX_EXPR);
2727 /* Return true if T is a statement that may alter the flow of control
2728 (e.g., a call to a non-returning function). */
2730 bool
2731 is_ctrl_altering_stmt (tree t)
2733 tree call;
2735 gcc_assert (t);
2736 call = get_call_expr_in (t);
2737 if (call)
2739 /* A non-pure/const CALL_EXPR alters flow control if the current
2740 function has nonlocal labels. */
2741 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2742 return true;
2744 /* A CALL_EXPR also alters control flow if it does not return. */
2745 if (call_expr_flags (call) & ECF_NORETURN)
2746 return true;
2749 /* If a statement can throw, it alters control flow. */
2750 return tree_can_throw_internal (t);
2754 /* Return true if T is a computed goto. */
2756 bool
2757 computed_goto_p (tree t)
2759 return (TREE_CODE (t) == GOTO_EXPR
2760 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2764 /* Checks whether EXPR is a simple local goto. */
2766 bool
2767 simple_goto_p (tree expr)
2769 return (TREE_CODE (expr) == GOTO_EXPR
2770 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2774 /* Return true if T should start a new basic block. PREV_T is the
2775 statement preceding T. It is used when T is a label or a case label.
2776 Labels should only start a new basic block if their previous statement
2777 wasn't a label. Otherwise, a sequence of labels would generate
2778 unnecessary basic blocks that contain only a single label. */
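/* For example, given

     L1:
     L2:
       x = 1;

   only L1 starts a new basic block; L2 is merged into it and counted in
   cfg_stats.num_merged_labels.  L2 would still get its own block if it
   were a nonlocal or forced label, or if L1 were nonlocal.  */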
2780 static inline bool
2781 stmt_starts_bb_p (tree t, tree prev_t)
2783 if (t == NULL_TREE)
2784 return false;
2786 /* LABEL_EXPRs start a new basic block only if the preceding
2787 statement wasn't a label of the same type. This prevents the
2788 creation of consecutive blocks that have nothing but a single
2789 label. */
2790 if (TREE_CODE (t) == LABEL_EXPR)
2792 /* Nonlocal and computed GOTO targets always start a new block. */
2793 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2794 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2795 return true;
2797 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2799 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2800 return true;
2802 cfg_stats.num_merged_labels++;
2803 return false;
2805 else
2806 return true;
2809 return false;
2813 /* Return true if T should end a basic block. */
2815 bool
2816 stmt_ends_bb_p (tree t)
2818 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2822 /* Add gotos that used to be represented implicitly in the CFG. */
2824 void
2825 disband_implicit_edges (void)
2827 basic_block bb;
2828 block_stmt_iterator last;
2829 edge e;
2830 edge_iterator ei;
2831 tree stmt, label;
2833 FOR_EACH_BB (bb)
2835 last = bsi_last (bb);
2836 stmt = last_stmt (bb);
2838 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2840 /* Remove superfluous gotos from COND_EXPR branches. Moved here
2841 from cfg_remove_useless_stmts since it violates the invariants
2842 for tree-CFG correspondence and thus fits better here, where
2843 we do it anyway. */
2844 e = find_edge (bb, bb->next_bb);
2845 if (e)
2847 if (e->flags & EDGE_TRUE_VALUE)
2848 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2849 else if (e->flags & EDGE_FALSE_VALUE)
2850 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2851 else
2852 gcc_unreachable ();
2853 e->flags |= EDGE_FALLTHRU;
2856 continue;
2859 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2861 /* Remove the RETURN_EXPR if we may fall through to the exit
2862 instead. */
2863 gcc_assert (single_succ_p (bb));
2864 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2866 if (bb->next_bb == EXIT_BLOCK_PTR
2867 && !TREE_OPERAND (stmt, 0))
2869 bsi_remove (&last);
2870 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2872 continue;
2875 /* There can be no fallthru edge if the last statement is a control
2876 one. */
2877 if (stmt && is_ctrl_stmt (stmt))
2878 continue;
2880 /* Find a fallthru edge and emit the goto if necessary. */
2881 FOR_EACH_EDGE (e, ei, bb->succs)
2882 if (e->flags & EDGE_FALLTHRU)
2883 break;
2885 if (!e || e->dest == bb->next_bb)
2886 continue;
2888 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2889 label = tree_block_label (e->dest);
2891 stmt = build1 (GOTO_EXPR, void_type_node, label);
2892 #ifdef USE_MAPPED_LOCATION
2893 SET_EXPR_LOCATION (stmt, e->goto_locus);
2894 #else
2895 SET_EXPR_LOCUS (stmt, e->goto_locus);
2896 #endif
2897 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2898 e->flags &= ~EDGE_FALLTHRU;
2902 /* Remove block annotations and other data structures. */
2904 void
2905 delete_tree_cfg_annotations (void)
2907 basic_block bb;
2908 if (n_basic_blocks > 0)
2909 free_blocks_annotations ();
2911 label_to_block_map = NULL;
2912 free_rbi_pool ();
2913 FOR_EACH_BB (bb)
2914 bb->rbi = NULL;
2918 /* Return the first statement in basic block BB. */
2920 tree
2921 first_stmt (basic_block bb)
2923 block_stmt_iterator i = bsi_start (bb);
2924 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2928 /* Return the last statement in basic block BB. */
2930 tree
2931 last_stmt (basic_block bb)
2933 block_stmt_iterator b = bsi_last (bb);
2934 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2938 /* Return a pointer to the last statement in block BB. */
2940 tree *
2941 last_stmt_ptr (basic_block bb)
2943 block_stmt_iterator last = bsi_last (bb);
2944 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2948 /* Return the last statement of an otherwise empty block. Return NULL
2949 if the block is totally empty, or if it contains more than one
2950 statement. */
2952 tree
2953 last_and_only_stmt (basic_block bb)
2955 block_stmt_iterator i = bsi_last (bb);
2956 tree last, prev;
2958 if (bsi_end_p (i))
2959 return NULL_TREE;
2961 last = bsi_stmt (i);
2962 bsi_prev (&i);
2963 if (bsi_end_p (i))
2964 return last;
2966 /* Empty statements should no longer appear in the instruction stream.
2967 Everything that might have appeared before should be deleted by
2968 remove_useless_stmts, and the optimizers should just bsi_remove
2969 instead of smashing with build_empty_stmt.
2971 Thus the only thing that should appear here in a block containing
2972 one executable statement is a label. */
2973 prev = bsi_stmt (i);
2974 if (TREE_CODE (prev) == LABEL_EXPR)
2975 return last;
2976 else
2977 return NULL_TREE;
2981 /* Mark BB as the basic block holding statement T. */
2983 void
2984 set_bb_for_stmt (tree t, basic_block bb)
2986 if (TREE_CODE (t) == PHI_NODE)
2987 PHI_BB (t) = bb;
2988 else if (TREE_CODE (t) == STATEMENT_LIST)
2990 tree_stmt_iterator i;
2991 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2992 set_bb_for_stmt (tsi_stmt (i), bb);
2994 else
2996 stmt_ann_t ann = get_stmt_ann (t);
2997 ann->bb = bb;
2999 /* If the statement is a label, add the label to the block-to-labels map
3000 so that we can speed up edge creation for GOTO_EXPRs. */
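/* The map is grown on demand as label uids are assigned; growing it to
   3 * uid / 2 rather than to exactly uid + 1 avoids reallocating the
   varray for every new label.  */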
3001 if (TREE_CODE (t) == LABEL_EXPR)
3003 int uid;
3005 t = LABEL_EXPR_LABEL (t);
3006 uid = LABEL_DECL_UID (t);
3007 if (uid == -1)
3009 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
3010 if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
3011 VARRAY_GROW (label_to_block_map, 3 * uid / 2);
3013 else
3014 /* We're moving an existing label. Make sure that we've
3015 removed it from the old block. */
3016 gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
3017 VARRAY_BB (label_to_block_map, uid) = bb;
3022 /* Finds iterator for STMT. */
3024 extern block_stmt_iterator
3025 bsi_for_stmt (tree stmt)
3027 block_stmt_iterator bsi;
3029 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
3030 if (bsi_stmt (bsi) == stmt)
3031 return bsi;
3033 gcc_unreachable ();
3036 /* Insert statement (or statement list) T before the statement
3037 pointed-to by iterator I. M specifies how to update iterator I
3038 after insertion (see enum bsi_iterator_update). */
3040 void
3041 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
3043 set_bb_for_stmt (t, i->bb);
3044 tsi_link_before (&i->tsi, t, m);
3045 modify_stmt (t);
3049 /* Insert statement (or statement list) T after the statement
3050 pointed-to by iterator I. M specifies how to update iterator I
3051 after insertion (see enum bsi_iterator_update). */
3053 void
3054 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
3056 set_bb_for_stmt (t, i->bb);
3057 tsi_link_after (&i->tsi, t, m);
3058 modify_stmt (t);
3062 /* Remove the statement pointed to by iterator I. The iterator is updated
3063 to the next statement. */
3065 void
3066 bsi_remove (block_stmt_iterator *i)
3068 tree t = bsi_stmt (*i);
3069 set_bb_for_stmt (t, NULL);
3070 tsi_delink (&i->tsi);
3074 /* Move the statement at FROM so it comes right after the statement at TO. */
3076 void
3077 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
3079 tree stmt = bsi_stmt (*from);
3080 bsi_remove (from);
3081 bsi_insert_after (to, stmt, BSI_SAME_STMT);
3085 /* Move the statement at FROM so it comes right before the statement at TO. */
3087 void
3088 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
3090 tree stmt = bsi_stmt (*from);
3091 bsi_remove (from);
3092 bsi_insert_before (to, stmt, BSI_SAME_STMT);
3096 /* Move the statement at FROM to the end of basic block BB. */
3098 void
3099 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
3101 block_stmt_iterator last = bsi_last (bb);
3103 /* Have to check bsi_end_p because it could be an empty block. */
3104 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
3105 bsi_move_before (from, &last);
3106 else
3107 bsi_move_after (from, &last);
3111 /* Replace the contents of the statement pointed to by iterator BSI
3112 with STMT. If PRESERVE_EH_INFO is true, the exception handling
3113 information of the original statement is preserved. */
3115 void
3116 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
3118 int eh_region;
3119 tree orig_stmt = bsi_stmt (*bsi);
3121 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
3122 set_bb_for_stmt (stmt, bsi->bb);
3124 /* Preserve EH region information from the original statement, if
3125 requested by the caller. */
3126 if (preserve_eh_info)
3128 eh_region = lookup_stmt_eh_region (orig_stmt);
3129 if (eh_region >= 0)
3130 add_stmt_to_eh_region (stmt, eh_region);
3133 *bsi_stmt_ptr (*bsi) = stmt;
3134 modify_stmt (stmt);
3138 /* Find a location on edge E where a statement may be inserted. Every
3139 attempt is made to place it in an existing basic block, but
3140 sometimes that isn't possible. When it isn't possible, the edge is
3141 split and the location is taken from the new block.
3143 In all cases, the returned *BSI points to the correct location. The
3144 return value is true if insertion should be done after the location,
3145 or false if it should be done before the location. If a new basic
3146 block has to be created, it is stored in *NEW_BB. */
3148 static bool
3149 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
3150 basic_block *new_bb)
3152 basic_block dest, src;
3153 tree tmp;
3155 dest = e->dest;
3156 restart:
3158 /* If the destination has one predecessor which has no PHI nodes,
3159 insert there. Except for the exit block.
3161 The requirement for no PHI nodes could be relaxed. Basically we
3162 would have to examine the PHIs to prove that none of them used
3163 the value set by the statement we want to insert on E. That
3164 hardly seems worth the effort. */
3165 if (single_pred_p (dest)
3166 && ! phi_nodes (dest)
3167 && dest != EXIT_BLOCK_PTR)
3169 *bsi = bsi_start (dest);
3170 if (bsi_end_p (*bsi))
3171 return true;
3173 /* Make sure we insert after any leading labels. */
3174 tmp = bsi_stmt (*bsi);
3175 while (TREE_CODE (tmp) == LABEL_EXPR)
3177 bsi_next (bsi);
3178 if (bsi_end_p (*bsi))
3179 break;
3180 tmp = bsi_stmt (*bsi);
3183 if (bsi_end_p (*bsi))
3185 *bsi = bsi_last (dest);
3186 return true;
3188 else
3189 return false;
3192 /* If the source has one successor, the edge is not abnormal and
3193 the last statement does not end a basic block, insert there.
3194 Except for the entry block. */
3195 src = e->src;
3196 if ((e->flags & EDGE_ABNORMAL) == 0
3197 && single_succ_p (src)
3198 && src != ENTRY_BLOCK_PTR)
3200 *bsi = bsi_last (src);
3201 if (bsi_end_p (*bsi))
3202 return true;
3204 tmp = bsi_stmt (*bsi);
3205 if (!stmt_ends_bb_p (tmp))
3206 return true;
3208 /* Insert code just before returning the value. We may need to decompose
3209 the return in case it contains a non-trivial operand. */
3210 if (TREE_CODE (tmp) == RETURN_EXPR)
3212 tree op = TREE_OPERAND (tmp, 0);
3213 if (!is_gimple_val (op))
3215 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3216 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3217 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3219 bsi_prev (bsi);
3220 return true;
3224 /* Otherwise, create a new basic block, and split this edge. */
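/* The block created by split_edge has a single predecessor, no PHI
   nodes and is not the exit block, so the restarted search above will
   accept it on the next pass.  */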
3225 dest = split_edge (e);
3226 if (new_bb)
3227 *new_bb = dest;
3228 e = single_pred_edge (dest);
3229 goto restart;
3233 /* This routine will commit all pending edge insertions, creating any new
3234 basic blocks which are necessary. */
3236 void
3237 bsi_commit_edge_inserts (void)
3239 basic_block bb;
3240 edge e;
3241 edge_iterator ei;
3243 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3245 FOR_EACH_BB (bb)
3246 FOR_EACH_EDGE (e, ei, bb->succs)
3247 bsi_commit_one_edge_insert (e, NULL);
3251 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3252 to this block, otherwise set it to NULL. */
3254 void
3255 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3257 if (new_bb)
3258 *new_bb = NULL;
3259 if (PENDING_STMT (e))
3261 block_stmt_iterator bsi;
3262 tree stmt = PENDING_STMT (e);
3264 PENDING_STMT (e) = NULL_TREE;
3266 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3267 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3268 else
3269 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3274 /* Add STMT to the pending list of edge E. No actual insertion is
3275 made until a call to bsi_commit_edge_inserts () is made. */
3277 void
3278 bsi_insert_on_edge (edge e, tree stmt)
3280 append_to_statement_list (stmt, &PENDING_STMT (e));
3283 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3284 block has to be created, it is returned. */
3286 basic_block
3287 bsi_insert_on_edge_immediate (edge e, tree stmt)
3289 block_stmt_iterator bsi;
3290 basic_block new_bb = NULL;
3292 gcc_assert (!PENDING_STMT (e));
3294 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3295 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3296 else
3297 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3299 return new_bb;
3302 /*---------------------------------------------------------------------------
3303 Tree specific functions for CFG manipulation
3304 ---------------------------------------------------------------------------*/
3306 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3308 static void
3309 reinstall_phi_args (edge new_edge, edge old_edge)
3311 tree var, phi;
3313 if (!PENDING_STMT (old_edge))
3314 return;
3316 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3317 var && phi;
3318 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3320 tree result = TREE_PURPOSE (var);
3321 tree arg = TREE_VALUE (var);
3323 gcc_assert (result == PHI_RESULT (phi));
3325 add_phi_arg (phi, arg, new_edge);
3328 PENDING_STMT (old_edge) = NULL;
3331 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3332 Abort on abnormal edges. */
3334 static basic_block
3335 tree_split_edge (edge edge_in)
3337 basic_block new_bb, after_bb, dest, src;
3338 edge new_edge, e;
3340 /* Abnormal edges cannot be split. */
3341 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3343 src = edge_in->src;
3344 dest = edge_in->dest;
3346 /* Place the new block in the block list. Try to keep the new block
3347 near its "logical" location. This is of most help to humans looking
3348 at debugging dumps. */
3349 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3350 after_bb = edge_in->src;
3351 else
3352 after_bb = dest->prev_bb;
3354 new_bb = create_empty_bb (after_bb);
3355 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3356 new_bb->count = edge_in->count;
3357 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3358 new_edge->probability = REG_BR_PROB_BASE;
3359 new_edge->count = edge_in->count;
3361 e = redirect_edge_and_branch (edge_in, new_bb);
3362 gcc_assert (e);
3363 reinstall_phi_args (new_edge, e);
3365 return new_bb;
3369 /* Return true when BB has label LABEL in it. */
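/* Only the leading labels of BB need to be examined: in a valid CFG,
   labels may appear only at the start of a basic block, so the loop
   below stops at the first non-label statement.  */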
3371 static bool
3372 has_label_p (basic_block bb, tree label)
3374 block_stmt_iterator bsi;
3376 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3378 tree stmt = bsi_stmt (bsi);
3380 if (TREE_CODE (stmt) != LABEL_EXPR)
3381 return false;
3382 if (LABEL_EXPR_LABEL (stmt) == label)
3383 return true;
3385 return false;
3389 /* Callback for walk_tree; check that all elements with their address taken
3390 are properly noticed as such. DATA is nonnull if and only if TP was
3391 seen inside a PHI node. */
3393 static tree
3394 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3396 tree t = *tp, x;
3397 bool in_phi = (data != NULL);
3399 if (TYPE_P (t))
3400 *walk_subtrees = 0;
3402 /* Check operand N for being valid GIMPLE and give error MSG if not.
3403 We check for constants explicitly since they are not considered
3404 gimple invariants if they overflowed. */
3405 #define CHECK_OP(N, MSG) \
3406 do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N)) \
3407 && !is_gimple_val (TREE_OPERAND (t, N))) \
3408 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3410 switch (TREE_CODE (t))
3412 case SSA_NAME:
3413 if (SSA_NAME_IN_FREE_LIST (t))
3415 error ("SSA name in freelist but still referenced");
3416 return *tp;
3418 break;
3420 case MODIFY_EXPR:
3421 x = TREE_OPERAND (t, 0);
3422 if (TREE_CODE (x) == BIT_FIELD_REF
3423 && is_gimple_reg (TREE_OPERAND (x, 0)))
3425 error ("GIMPLE register modified with BIT_FIELD_REF");
3426 return t;
3428 break;
3430 case ADDR_EXPR:
3431 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3432 dead PHIs that take the address of something. But if the PHI
3433 result is dead, the fact that it takes the address of anything
3434 is irrelevant. Because we cannot tell from here whether a PHI result
3435 is dead, we just skip this check for PHIs altogether. This means
3436 we may be missing "valid" checks, but what can you do?
3437 This was PR19217. */
3438 if (in_phi)
3439 break;
3441 /* Skip any references (they will be checked when we recurse down the
3442 tree) and ensure that any variable used as a prefix is marked
3443 addressable. */
3444 for (x = TREE_OPERAND (t, 0);
3445 handled_component_p (x);
3446 x = TREE_OPERAND (x, 0))
3449 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3450 return NULL;
3451 if (!TREE_ADDRESSABLE (x))
3453 error ("address taken, but ADDRESSABLE bit not set");
3454 return x;
3456 break;
3458 case COND_EXPR:
3459 x = COND_EXPR_COND (t);
3460 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3462 error ("non-boolean used in condition");
3463 return x;
3465 break;
3467 case NOP_EXPR:
3468 case CONVERT_EXPR:
3469 case FIX_TRUNC_EXPR:
3470 case FIX_CEIL_EXPR:
3471 case FIX_FLOOR_EXPR:
3472 case FIX_ROUND_EXPR:
3473 case FLOAT_EXPR:
3474 case NEGATE_EXPR:
3475 case ABS_EXPR:
3476 case BIT_NOT_EXPR:
3477 case NON_LVALUE_EXPR:
3478 case TRUTH_NOT_EXPR:
3479 CHECK_OP (0, "Invalid operand to unary operator");
3480 break;
3482 case REALPART_EXPR:
3483 case IMAGPART_EXPR:
3484 case COMPONENT_REF:
3485 case ARRAY_REF:
3486 case ARRAY_RANGE_REF:
3487 case BIT_FIELD_REF:
3488 case VIEW_CONVERT_EXPR:
3489 /* We have a nest of references. Verify that each of the operands
3490 that determine where to reference is either a constant or a variable,
3491 verify that the base is valid, and then show we've already checked
3492 the subtrees. */
3493 while (handled_component_p (t))
3495 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3496 CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
3497 else if (TREE_CODE (t) == ARRAY_REF
3498 || TREE_CODE (t) == ARRAY_RANGE_REF)
3500 CHECK_OP (1, "Invalid array index.");
3501 if (TREE_OPERAND (t, 2))
3502 CHECK_OP (2, "Invalid array lower bound.");
3503 if (TREE_OPERAND (t, 3))
3504 CHECK_OP (3, "Invalid array stride.");
3506 else if (TREE_CODE (t) == BIT_FIELD_REF)
3508 CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
3509 CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
3512 t = TREE_OPERAND (t, 0);
3515 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3517 error ("Invalid reference prefix.");
3518 return t;
3520 *walk_subtrees = 0;
3521 break;
3523 case LT_EXPR:
3524 case LE_EXPR:
3525 case GT_EXPR:
3526 case GE_EXPR:
3527 case EQ_EXPR:
3528 case NE_EXPR:
3529 case UNORDERED_EXPR:
3530 case ORDERED_EXPR:
3531 case UNLT_EXPR:
3532 case UNLE_EXPR:
3533 case UNGT_EXPR:
3534 case UNGE_EXPR:
3535 case UNEQ_EXPR:
3536 case LTGT_EXPR:
3537 case PLUS_EXPR:
3538 case MINUS_EXPR:
3539 case MULT_EXPR:
3540 case TRUNC_DIV_EXPR:
3541 case CEIL_DIV_EXPR:
3542 case FLOOR_DIV_EXPR:
3543 case ROUND_DIV_EXPR:
3544 case TRUNC_MOD_EXPR:
3545 case CEIL_MOD_EXPR:
3546 case FLOOR_MOD_EXPR:
3547 case ROUND_MOD_EXPR:
3548 case RDIV_EXPR:
3549 case EXACT_DIV_EXPR:
3550 case MIN_EXPR:
3551 case MAX_EXPR:
3552 case LSHIFT_EXPR:
3553 case RSHIFT_EXPR:
3554 case LROTATE_EXPR:
3555 case RROTATE_EXPR:
3556 case BIT_IOR_EXPR:
3557 case BIT_XOR_EXPR:
3558 case BIT_AND_EXPR:
3559 CHECK_OP (0, "Invalid operand to binary operator");
3560 CHECK_OP (1, "Invalid operand to binary operator");
3561 break;
3563 default:
3564 break;
3566 return NULL;
3568 #undef CHECK_OP
3572 /* Verify STMT, return true if STMT is not in GIMPLE form.
3573 TODO: Implement type checking. */
3575 static bool
3576 verify_stmt (tree stmt, bool last_in_block)
3578 tree addr;
3580 if (!is_gimple_stmt (stmt))
3582 error ("Is not a valid GIMPLE statement.");
3583 goto fail;
3586 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3587 if (addr)
3589 debug_generic_stmt (addr);
3590 return true;
3593 /* If the statement is marked as part of an EH region, then it is
3594 expected that the statement could throw. Verify that when
3595 optimizations simplify a statement to the point where we can prove
3596 it cannot throw, the other data structures are updated to
3597 match. */
3598 if (lookup_stmt_eh_region (stmt) >= 0)
3600 if (!tree_could_throw_p (stmt))
3602 error ("Statement marked for throw, but doesn%'t.");
3603 goto fail;
3605 if (!last_in_block && tree_can_throw_internal (stmt))
3607 error ("Statement marked for throw in middle of block.");
3608 goto fail;
3612 return false;
3614 fail:
3615 debug_generic_stmt (stmt);
3616 return true;
3620 /* Return true when T can be shared. */
3622 static bool
3623 tree_node_can_be_shared (tree t)
3625 if (IS_TYPE_OR_DECL_P (t)
3626 /* We check for constants explicitly since they are not considered
3627 gimple invariants if they overflowed. */
3628 || CONSTANT_CLASS_P (t)
3629 || is_gimple_min_invariant (t)
3630 || TREE_CODE (t) == SSA_NAME
3631 || t == error_mark_node)
3632 return true;
3634 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3635 return true;
3637 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3638 /* We check for constants explicitly since they are not considered
3639 gimple invariants if they overflowed. */
3640 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3641 || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3642 || (TREE_CODE (t) == COMPONENT_REF
3643 || TREE_CODE (t) == REALPART_EXPR
3644 || TREE_CODE (t) == IMAGPART_EXPR))
3645 t = TREE_OPERAND (t, 0);
3647 if (DECL_P (t))
3648 return true;
3650 return false;
3654 /* Called via walk_tree. Verify tree sharing. */
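/* DATA is a hash table recording every node seen so far that must not
   be shared.  Finding an existing entry means the same node is reachable
   from two places; returning it aborts the walk so the caller can report
   the incorrectly shared node.  */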
3656 static tree
3657 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3659 htab_t htab = (htab_t) data;
3660 void **slot;
3662 if (tree_node_can_be_shared (*tp))
3664 *walk_subtrees = false;
3665 return NULL;
3668 slot = htab_find_slot (htab, *tp, INSERT);
3669 if (*slot)
3670 return *slot;
3671 *slot = *tp;
3673 return NULL;
3677 /* Verify the GIMPLE statement chain. */
3679 void
3680 verify_stmts (void)
3682 basic_block bb;
3683 block_stmt_iterator bsi;
3684 bool err = false;
3685 htab_t htab;
3686 tree addr;
3688 timevar_push (TV_TREE_STMT_VERIFY);
3689 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3691 FOR_EACH_BB (bb)
3693 tree phi;
3694 int i;
3696 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3698 int phi_num_args = PHI_NUM_ARGS (phi);
3700 for (i = 0; i < phi_num_args; i++)
3702 tree t = PHI_ARG_DEF (phi, i);
3703 tree addr;
3705 /* Addressable variables do have SSA_NAMEs but they
3706 are not considered gimple values. */
3707 if (TREE_CODE (t) != SSA_NAME
3708 && TREE_CODE (t) != FUNCTION_DECL
3709 && !is_gimple_val (t))
3711 error ("PHI def is not a GIMPLE value");
3712 debug_generic_stmt (phi);
3713 debug_generic_stmt (t);
3714 err |= true;
3717 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3718 if (addr)
3720 debug_generic_stmt (addr);
3721 err |= true;
3724 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3725 if (addr)
3727 error ("Incorrect sharing of tree nodes");
3728 debug_generic_stmt (phi);
3729 debug_generic_stmt (addr);
3730 err |= true;
3735 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3737 tree stmt = bsi_stmt (bsi);
3738 bsi_next (&bsi);
3739 err |= verify_stmt (stmt, bsi_end_p (bsi));
3740 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3741 if (addr)
3743 error ("Incorrect sharing of tree nodes");
3744 debug_generic_stmt (stmt);
3745 debug_generic_stmt (addr);
3746 err |= true;
3751 if (err)
3752 internal_error ("verify_stmts failed.");
3754 htab_delete (htab);
3755 timevar_pop (TV_TREE_STMT_VERIFY);
3759 /* Verifies that the flow information is OK. */
3761 static int
3762 tree_verify_flow_info (void)
3764 int err = 0;
3765 basic_block bb;
3766 block_stmt_iterator bsi;
3767 tree stmt;
3768 edge e;
3769 edge_iterator ei;
3771 if (ENTRY_BLOCK_PTR->stmt_list)
3773 error ("ENTRY_BLOCK has a statement list associated with it\n");
3774 err = 1;
3777 if (EXIT_BLOCK_PTR->stmt_list)
3779 error ("EXIT_BLOCK has a statement list associated with it\n");
3780 err = 1;
3783 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3784 if (e->flags & EDGE_FALLTHRU)
3786 error ("Fallthru to exit from bb %d\n", e->src->index);
3787 err = 1;
3790 FOR_EACH_BB (bb)
3792 bool found_ctrl_stmt = false;
3794 stmt = NULL_TREE;
3796 /* Skip labels at the start of the basic block. */
3797 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3799 tree prev_stmt = stmt;
3801 stmt = bsi_stmt (bsi);
3803 if (TREE_CODE (stmt) != LABEL_EXPR)
3804 break;
3806 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3808 error ("Nonlocal label %s is not first "
3809 "in a sequence of labels in bb %d",
3810 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3811 bb->index);
3812 err = 1;
3815 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3817 error ("Label %s to block does not match in bb %d\n",
3818 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3819 bb->index);
3820 err = 1;
3823 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3824 != current_function_decl)
3826 error ("Label %s has incorrect context in bb %d\n",
3827 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3828 bb->index);
3829 err = 1;
3833 /* Verify that the body of basic block BB is free of control flow. */
3834 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3836 tree stmt = bsi_stmt (bsi);
3838 if (found_ctrl_stmt)
3840 error ("Control flow in the middle of basic block %d\n",
3841 bb->index);
3842 err = 1;
3845 if (stmt_ends_bb_p (stmt))
3846 found_ctrl_stmt = true;
3848 if (TREE_CODE (stmt) == LABEL_EXPR)
3850 error ("Label %s in the middle of basic block %d\n",
3851 IDENTIFIER_POINTER (DECL_NAME (stmt)),
3852 bb->index);
3853 err = 1;
3856 bsi = bsi_last (bb);
3857 if (bsi_end_p (bsi))
3858 continue;
3860 stmt = bsi_stmt (bsi);
3862 if (is_ctrl_stmt (stmt))
3864 FOR_EACH_EDGE (e, ei, bb->succs)
3865 if (e->flags & EDGE_FALLTHRU)
3867 error ("Fallthru edge after a control statement in bb %d \n",
3868 bb->index);
3869 err = 1;
3873 switch (TREE_CODE (stmt))
3875 case COND_EXPR:
3877 edge true_edge;
3878 edge false_edge;
3879 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3880 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3882 error ("Structured COND_EXPR at the end of bb %d\n", bb->index);
3883 err = 1;
3886 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3888 if (!true_edge || !false_edge
3889 || !(true_edge->flags & EDGE_TRUE_VALUE)
3890 || !(false_edge->flags & EDGE_FALSE_VALUE)
3891 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3892 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3893 || EDGE_COUNT (bb->succs) >= 3)
3895 error ("Wrong outgoing edge flags at end of bb %d\n",
3896 bb->index);
3897 err = 1;
3900 if (!has_label_p (true_edge->dest,
3901 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3903 error ("%<then%> label does not match edge at end of bb %d\n",
3904 bb->index);
3905 err = 1;
3908 if (!has_label_p (false_edge->dest,
3909 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3911 error ("%<else%> label does not match edge at end of bb %d\n",
3912 bb->index);
3913 err = 1;
3916 break;
3918 case GOTO_EXPR:
3919 if (simple_goto_p (stmt))
3921 error ("Explicit goto at end of bb %d\n", bb->index);
3922 err = 1;
3924 else
3926 /* FIXME. We should double check that the labels in the
3927 destination blocks have their address taken. */
3928 FOR_EACH_EDGE (e, ei, bb->succs)
3929 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3930 | EDGE_FALSE_VALUE))
3931 || !(e->flags & EDGE_ABNORMAL))
3933 error ("Wrong outgoing edge flags at end of bb %d\n",
3934 bb->index);
3935 err = 1;
3938 break;
3940 case RETURN_EXPR:
3941 if (!single_succ_p (bb)
3942 || (single_succ_edge (bb)->flags
3943 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3944 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3946 error ("Wrong outgoing edge flags at end of bb %d\n", bb->index);
3947 err = 1;
3949 if (single_succ (bb) != EXIT_BLOCK_PTR)
3951 error ("Return edge does not point to exit in bb %d\n",
3952 bb->index);
3953 err = 1;
3955 break;
3957 case SWITCH_EXPR:
3959 tree prev;
3960 edge e;
3961 size_t i, n;
3962 tree vec;
3964 vec = SWITCH_LABELS (stmt);
3965 n = TREE_VEC_LENGTH (vec);
3967 /* Mark all the destination basic blocks. */
3968 for (i = 0; i < n; ++i)
3970 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3971 basic_block label_bb = label_to_block (lab);
3973 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3974 label_bb->aux = (void *)1;
3977 /* Verify that the case labels are sorted. */
3978 prev = TREE_VEC_ELT (vec, 0);
3979 for (i = 1; i < n - 1; ++i)
3981 tree c = TREE_VEC_ELT (vec, i);
3982 if (! CASE_LOW (c))
3984 error ("Found default case not at end of case vector");
3985 err = 1;
3986 continue;
3988 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3990 error ("Case labels not sorted:\n ");
3991 print_generic_expr (stderr, prev, 0);
3992 fprintf (stderr," is greater than ");
3993 print_generic_expr (stderr, c, 0);
3994 fprintf (stderr," but comes before it.\n");
3995 err = 1;
3997 prev = c;
3999 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
4001 error ("No default case found at end of case vector");
4002 err = 1;
4005 FOR_EACH_EDGE (e, ei, bb->succs)
4007 if (!e->dest->aux)
4009 error ("Extra outgoing edge %d->%d\n",
4010 bb->index, e->dest->index);
4011 err = 1;
4013 e->dest->aux = (void *)2;
4014 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4015 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4017 error ("Wrong outgoing edge flags at end of bb %d\n",
4018 bb->index);
4019 err = 1;
4023 /* Check that we have all of them. */
4024 for (i = 0; i < n; ++i)
4026 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4027 basic_block label_bb = label_to_block (lab);
4029 if (label_bb->aux != (void *)2)
4031 error ("Missing edge %i->%i",
4032 bb->index, label_bb->index);
4033 err = 1;
4037 FOR_EACH_EDGE (e, ei, bb->succs)
4038 e->dest->aux = (void *)0;
4041 default: ;
4045 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
4046 verify_dominators (CDI_DOMINATORS);
4048 return err;
4052 /* Updates phi nodes after creating a forwarder block joined
4053 by edge FALLTHRU. */
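/* Each PHI node in DUMMY (fallthru->src) gets a fresh SSA name as its
   result; the original result becomes the result of a new PHI node in
   BB (fallthru->dest), whose argument on FALLTHRU is that fresh name.
   Arguments for BB's other incoming edges are taken from the pending
   statement lists flushed at the end of this function.  */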
4055 static void
4056 tree_make_forwarder_block (edge fallthru)
4058 edge e;
4059 edge_iterator ei;
4060 basic_block dummy, bb;
4061 tree phi, new_phi, var;
4063 dummy = fallthru->src;
4064 bb = fallthru->dest;
4066 if (single_pred_p (bb))
4067 return;
4069 /* If we redirected a branch we must create new phi nodes at the
4070 start of BB. */
4071 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
4073 var = PHI_RESULT (phi);
4074 new_phi = create_phi_node (var, bb);
4075 SSA_NAME_DEF_STMT (var) = new_phi;
4076 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4077 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4080 /* Ensure that the PHI node chain is in the same order. */
4081 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4083 /* Add the arguments we have stored on edges. */
4084 FOR_EACH_EDGE (e, ei, bb->preds)
4086 if (e == fallthru)
4087 continue;
4089 flush_pending_stmts (e);
4094 /* Return true if basic block BB does nothing except pass control
4095 flow to another block and we can safely insert a label at
4096 the start of the successor block.
4098 As a precondition, we require that BB not be equal to
4099 ENTRY_BLOCK_PTR. */
4101 static bool
4102 tree_forwarder_block_p (basic_block bb, bool phi_wanted)
4104 block_stmt_iterator bsi;
4106 /* BB must have a single outgoing edge. */
4107 if (!single_succ_p (bb)
4108 /* If PHI_WANTED is false, BB must not have any PHI nodes.
4109 Otherwise, BB must have PHI nodes. */
4110 || (phi_nodes (bb) != NULL_TREE) != phi_wanted
4111 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
4112 || single_succ (bb) == EXIT_BLOCK_PTR
4113 /* Nor should this be an infinite loop. */
4114 || single_succ (bb) == bb
4115 /* BB may not have an abnormal outgoing edge. */
4116 || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
4117 return false;
4119 #if ENABLE_CHECKING
4120 gcc_assert (bb != ENTRY_BLOCK_PTR);
4121 #endif
4123 /* Now walk through the statements backward. We can ignore labels;
4124 anything else means this is not a forwarder block. */
4125 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
4127 tree stmt = bsi_stmt (bsi);
4129 switch (TREE_CODE (stmt))
4131 case LABEL_EXPR:
4132 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
4133 return false;
4134 break;
4136 default:
4137 return false;
4141 if (find_edge (ENTRY_BLOCK_PTR, bb))
4142 return false;
4144 if (current_loops)
4146 basic_block dest;
4147 /* Protect loop latches, headers and preheaders. */
4148 if (bb->loop_father->header == bb)
4149 return false;
4150 dest = EDGE_SUCC (bb, 0)->dest;
4152 if (dest->loop_father->header == dest)
4153 return false;
4156 return true;
4159 /* Return true if BB has at least one abnormal incoming edge. */
4161 static inline bool
4162 has_abnormal_incoming_edge_p (basic_block bb)
4164 edge e;
4165 edge_iterator ei;
4167 FOR_EACH_EDGE (e, ei, bb->preds)
4168 if (e->flags & EDGE_ABNORMAL)
4169 return true;
4171 return false;
4174 /* Removes forwarder block BB. Returns false if this failed. If a new
4175 forwarder block is created due to redirection of edges, it is
4176 added to the worklist. */
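/* WORKLIST is a cursor into the array allocated by
   cleanup_forwarder_blocks: pushing a candidate stores it at the cursor
   and advances it, so the caller's draining loop will process it
   later.  */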
4178 static bool
4179 remove_forwarder_block (basic_block bb, basic_block **worklist)
4181 edge succ = single_succ_edge (bb), e, s;
4182 basic_block dest = succ->dest;
4183 tree label;
4184 tree phi;
4185 edge_iterator ei;
4186 block_stmt_iterator bsi, bsi_to;
4187 bool seen_abnormal_edge = false;
4189 /* We check for infinite loops already in tree_forwarder_block_p.
4190 However, it may happen that an infinite loop is created
4191 afterwards due to the removal of forwarders. */
4192 if (dest == bb)
4193 return false;
4195 /* If the destination block begins with a nonlocal label, do not merge
4196 it. */
4197 label = first_stmt (dest);
4198 if (label
4199 && TREE_CODE (label) == LABEL_EXPR
4200 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
4201 return false;
4203 /* If there is an abnormal edge to basic block BB, but not into
4204 DEST, problems might occur during removal of the PHI nodes at
4205 out-of-SSA time due to overlapping live ranges of registers.
4207 If there is an abnormal edge into DEST, the problems would occur
4208 anyway, since cleanup_dead_labels would then merge the labels of
4209 two different EH regions, and the rest of the exception handling
4210 code does not like that.
4212 So if there is an abnormal edge to BB, proceed only if there is
4213 no abnormal edge to DEST and there are no PHI nodes in DEST. */
4214 if (has_abnormal_incoming_edge_p (bb))
4216 seen_abnormal_edge = true;
4218 if (has_abnormal_incoming_edge_p (dest)
4219 || phi_nodes (dest) != NULL_TREE)
4220 return false;
4223 /* If there are phi nodes in DEST, and some of the blocks that are
4224 predecessors of BB are also predecessors of DEST, check that the
4225 phi node arguments match. */
4226 if (phi_nodes (dest))
4228 FOR_EACH_EDGE (e, ei, bb->preds)
4230 s = find_edge (e->src, dest);
4231 if (!s)
4232 continue;
4234 if (!phi_alternatives_equal (dest, succ, s))
4235 return false;
4239 /* Redirect the edges. */
4240 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4242 if (e->flags & EDGE_ABNORMAL)
4244 /* If there is an abnormal edge, redirect it anyway, and
4245 move the labels to the new block to make it legal. */
4246 s = redirect_edge_succ_nodup (e, dest);
4248 else
4249 s = redirect_edge_and_branch (e, dest);
4251 if (s == e)
4253 /* Create arguments for the phi nodes, since the edge was not
4254 here before. */
4255 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4256 add_phi_arg (phi, PHI_ARG_DEF (phi, succ->dest_idx), s);
4258 else
4260 /* The source basic block might become a forwarder. We know
4261 that it was not a forwarder before, since it used to have
4262 at least two outgoing edges, so we may just add it to the
4263 worklist. */
4264 if (tree_forwarder_block_p (s->src, false))
4265 *(*worklist)++ = s->src;
4269 if (seen_abnormal_edge)
4271 /* Move the labels to the new block, so that the redirection of
4272 the abnormal edges works. */
4274 bsi_to = bsi_start (dest);
4275 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
4277 label = bsi_stmt (bsi);
4278 gcc_assert (TREE_CODE (label) == LABEL_EXPR);
4279 bsi_remove (&bsi);
4280 bsi_insert_before (&bsi_to, label, BSI_CONTINUE_LINKING);
4284 /* Update the dominators. */
4285 if (dom_info_available_p (CDI_DOMINATORS))
4287 basic_block dom, dombb, domdest;
4289 dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
4290 domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
4291 if (domdest == bb)
4293 /* Shortcut to avoid calling (relatively expensive)
4294 nearest_common_dominator unless necessary. */
4295 dom = dombb;
4297 else
4298 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
4300 set_immediate_dominator (CDI_DOMINATORS, dest, dom);
4303 /* And kill the forwarder block. */
4304 delete_basic_block (bb);
4306 return true;
4309 /* Removes forwarder blocks. */
4311 static bool
4312 cleanup_forwarder_blocks (void)
4314 basic_block bb;
4315 bool changed = false;
4316 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
4317 basic_block *current = worklist;
4319 FOR_EACH_BB (bb)
4321 if (tree_forwarder_block_p (bb, false))
4322 *current++ = bb;
4325 while (current != worklist)
4327 bb = *--current;
4328 changed |= remove_forwarder_block (bb, &current);
4331 free (worklist);
4332 return changed;
4335 /* Merge the PHI nodes at BB into those at BB's sole successor. */
4337 static void
4338 remove_forwarder_block_with_phi (basic_block bb)
4340 edge succ = single_succ_edge (bb);
4341 basic_block dest = succ->dest;
4342 tree label;
4343 basic_block dombb, domdest, dom;
4345 /* We check for infinite loops already in tree_forwarder_block_p.
4346 However, it may happen that an infinite loop is created
4347 afterwards due to the removal of forwarders. */
4348 if (dest == bb)
4349 return;
4351 /* If the destination block begins with a nonlocal label, do not
4352 merge it. */
4353 label = first_stmt (dest);
4354 if (label
4355 && TREE_CODE (label) == LABEL_EXPR
4356 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
4357 return;
4359 /* Redirect each incoming edge to BB to DEST. */
4360 while (EDGE_COUNT (bb->preds) > 0)
4362 edge e = EDGE_PRED (bb, 0), s;
4363 tree phi;
4365 s = find_edge (e->src, dest);
4366 if (s)
4368 /* We already have an edge S from E->src to DEST. If S and
4369 E->dest's sole successor edge have the same PHI arguments
4370 at DEST, redirect E to DEST. */
4371 if (phi_alternatives_equal (dest, s, succ))
4373 e = redirect_edge_and_branch (e, dest);
4374 PENDING_STMT (e) = NULL_TREE;
4375 continue;
4378 /* PHI arguments are different. Create a forwarder block by
4379 splitting E so that we can merge PHI arguments on E to
4380 DEST. */
4381 e = single_succ_edge (split_edge (e));
4384 s = redirect_edge_and_branch (e, dest);
4386 /* redirect_edge_and_branch must not create a new edge. */
4387 gcc_assert (s == e);
4389 /* Add to the PHI nodes at DEST each PHI argument removed at the
4390 destination of E. */
4391 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4393 tree def = PHI_ARG_DEF (phi, succ->dest_idx);
4395 if (TREE_CODE (def) == SSA_NAME)
4397 tree var;
4399 /* If DEF is one of the results of PHI nodes removed during
4400 redirection, replace it with the PHI argument that used
4401 to be on E. */
4402 for (var = PENDING_STMT (e); var; var = TREE_CHAIN (var))
4404 tree old_arg = TREE_PURPOSE (var);
4405 tree new_arg = TREE_VALUE (var);
4407 if (def == old_arg)
4409 def = new_arg;
4410 break;
4415 add_phi_arg (phi, def, s);
4418 PENDING_STMT (e) = NULL;
4421 /* Update the dominators. */
4422 dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
4423 domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
4424 if (domdest == bb)
4426 /* Shortcut to avoid calling (relatively expensive)
4427 nearest_common_dominator unless necessary. */
4428 dom = dombb;
4430 else
4431 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
4433 set_immediate_dominator (CDI_DOMINATORS, dest, dom);
4435 /* Remove BB since all of BB's incoming edges have been redirected
4436 to DEST. */
4437 delete_basic_block (bb);
4440 /* This pass merges PHI nodes if one feeds into another. For example,
4441 suppose we have the following:
4443 goto <bb 9> (<L9>);
4445 <L8>:;
4446 tem_17 = foo ();
4448 # tem_6 = PHI <tem_17(8), tem_23(7)>;
4449 <L9>:;
4451 # tem_3 = PHI <tem_6(9), tem_2(5)>;
4452 <L10>:;
4454 Then we merge the first PHI node into the second one like so:
4456 goto <bb 9> (<L10>);
4458 <L8>:;
4459 tem_17 = foo ();
4461 # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
4462 <L10>:;
4465 static void
4466 merge_phi_nodes (void)
4468 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
4469 basic_block *current = worklist;
4470 basic_block bb;
4472 calculate_dominance_info (CDI_DOMINATORS);
4474 /* Find all PHI nodes that we may be able to merge. */
4475 FOR_EACH_BB (bb)
4477 basic_block dest;
4479 /* Look for a forwarder block with PHI nodes. */
4480 if (!tree_forwarder_block_p (bb, true))
4481 continue;
4483 dest = single_succ (bb);
4485 /* We have to feed into another basic block with PHI
4486 nodes. */
4487 if (!phi_nodes (dest)
4488 /* We don't want to deal with a basic block with
4489 abnormal edges. */
4490 || has_abnormal_incoming_edge_p (bb))
4491 continue;
4493 if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
4495 /* If BB does not dominate DEST, then the PHI nodes at
4496 DEST must be the only users of the results of the PHI
4497 nodes at BB. */
4498 *current++ = bb;
4502 /* Now let's drain WORKLIST. */
4503 while (current != worklist)
4505 bb = *--current;
4506 remove_forwarder_block_with_phi (bb);
4509 free (worklist);
4512 static bool
4513 gate_merge_phi (void)
4515 return 1;
4518 struct tree_opt_pass pass_merge_phi = {
4519 "mergephi", /* name */
4520 gate_merge_phi, /* gate */
4521 merge_phi_nodes, /* execute */
4522 NULL, /* sub */
4523 NULL, /* next */
4524 0, /* static_pass_number */
4525 TV_TREE_MERGE_PHI, /* tv_id */
4526 PROP_cfg | PROP_ssa, /* properties_required */
4527 0, /* properties_provided */
4528 0, /* properties_destroyed */
4529 0, /* todo_flags_start */
4530 TODO_dump_func | TODO_ggc_collect /* todo_flags_finish */
4531 | TODO_verify_ssa,
4532 0 /* letter */
4535 /* Return a non-special label at the head of basic block BB.
4536 Create one if it doesn't exist. */
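/* As an illustrative sketch (the snippet below is not verbatim from this
   file): tree_redirect_edge_and_branch uses this to obtain a label that a
   rewritten jump can point at, roughly

     label = tree_block_label (dest);
     GOTO_DESTINATION (COND_EXPR_THEN (stmt)) = label;

   for the true arm of a COND_EXPR ending the source block.  */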
4538 tree
4539 tree_block_label (basic_block bb)
4541 block_stmt_iterator i, s = bsi_start (bb);
4542 bool first = true;
4543 tree label, stmt;
4545 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4547 stmt = bsi_stmt (i);
4548 if (TREE_CODE (stmt) != LABEL_EXPR)
4549 break;
4550 label = LABEL_EXPR_LABEL (stmt);
4551 if (!DECL_NONLOCAL (label))
4553 if (!first)
4554 bsi_move_before (&i, &s);
4555 return label;
4559 label = create_artificial_label ();
4560 stmt = build1 (LABEL_EXPR, void_type_node, label);
4561 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4562 return label;
4566 /* Attempt to perform edge redirection by replacing a possibly complex
4567 jump instruction by a goto or by removing the jump completely.
4568 This can apply only if all edges now point to the same block. The
4569 parameters and return values are equivalent to
4570 redirect_edge_and_branch. */
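/* A sketch of the situation this handles: SRC ends in

     if (x_1 != 0) goto <L2>; else goto <L3>;

   and E is redirected so that both outgoing edges of SRC would reach the
   same block.  The test then decides nothing, so the COND_EXPR (or
   SWITCH_EXPR) is removed and the surviving edge becomes a plain
   fallthru.  */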
4572 static edge
4573 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4575 basic_block src = e->src;
4576 block_stmt_iterator b;
4577 tree stmt;
4579 /* We can replace or remove a complex jump only when we have exactly
4580 two edges. */
4581 if (EDGE_COUNT (src->succs) != 2
4582 /* Verify that all targets will be TARGET. Specifically, the
4583 edge that is not E must also go to TARGET. */
4584 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4585 return NULL;
4587 b = bsi_last (src);
4588 if (bsi_end_p (b))
4589 return NULL;
4590 stmt = bsi_stmt (b);
4592 if (TREE_CODE (stmt) == COND_EXPR
4593 || TREE_CODE (stmt) == SWITCH_EXPR)
4595 bsi_remove (&b);
4596 e = ssa_redirect_edge (e, target);
4597 e->flags = EDGE_FALLTHRU;
4598 return e;
4601 return NULL;
4605 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4606 edge representing the redirected branch. */
4608 static edge
4609 tree_redirect_edge_and_branch (edge e, basic_block dest)
4611 basic_block bb = e->src;
4612 block_stmt_iterator bsi;
4613 edge ret;
4614 tree label, stmt;
4616 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4617 return NULL;
4619 if (e->src != ENTRY_BLOCK_PTR
4620 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4621 return ret;
4623 if (e->dest == dest)
4624 return NULL;
4626 label = tree_block_label (dest);
4628 bsi = bsi_last (bb);
4629 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4631 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4633 case COND_EXPR:
4634 stmt = (e->flags & EDGE_TRUE_VALUE
4635 ? COND_EXPR_THEN (stmt)
4636 : COND_EXPR_ELSE (stmt));
4637 GOTO_DESTINATION (stmt) = label;
4638 break;
4640 case GOTO_EXPR:
4641 /* No non-abnormal edges should lead from a non-simple goto, and
4642 simple ones should be represented implicitly. */
4643 gcc_unreachable ();
4645 case SWITCH_EXPR:
4647 tree cases = get_cases_for_edge (e, stmt);
4649 /* If we have a list of cases associated with E, then use it
4650 as it's a lot faster than walking the entire case vector. */
4651 if (cases)
4653 edge e2 = find_edge (e->src, dest);
4654 tree last, first;
4656 first = cases;
4657 while (cases)
4659 last = cases;
4660 CASE_LABEL (cases) = label;
4661 cases = TREE_CHAIN (cases);
4664 /* If there was already an edge in the CFG, then we need
4665 to move all the cases associated with E to E2. */
4666 if (e2)
4668 tree cases2 = get_cases_for_edge (e2, stmt);
4670 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4671 TREE_CHAIN (cases2) = first;
4674 else
4676 tree vec = SWITCH_LABELS (stmt);
4677 size_t i, n = TREE_VEC_LENGTH (vec);
4679 for (i = 0; i < n; i++)
4681 tree elt = TREE_VEC_ELT (vec, i);
4683 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4684 CASE_LABEL (elt) = label;
4688 break;
4691 case RETURN_EXPR:
4692 bsi_remove (&bsi);
4693 e->flags |= EDGE_FALLTHRU;
4694 break;
4696 default:
4697 /* Otherwise it must be a fallthru edge, and we don't need to
4698 do anything besides redirecting it. */
4699 gcc_assert (e->flags & EDGE_FALLTHRU);
4700 break;
4703 /* Update/insert PHI nodes as necessary. */
4705 /* Now update the edges in the CFG. */
4706 e = ssa_redirect_edge (e, dest);
4708 return e;
4712 /* Simple wrapper, as we can always redirect fallthru edges. */
4714 static basic_block
4715 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4717 e = tree_redirect_edge_and_branch (e, dest);
4718 gcc_assert (e);
4720 return NULL;
4724 /* Splits basic block BB after statement STMT (but at least after the
4725 labels). If STMT is NULL, BB is split just after the labels. */
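/* For example (an illustrative sketch), splitting

     <bb 4>:  <L4>:;  a_1 = foo ();  b_2 = a_1 + 1;

   after "a_1 = foo ();" keeps the label and the call in BB, moves
   "b_2 = a_1 + 1;" into the new block, and the new block takes over all
   of BB's outgoing edges.  */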
4727 static basic_block
4728 tree_split_block (basic_block bb, void *stmt)
4730 block_stmt_iterator bsi, bsi_tgt;
4731 tree act;
4732 basic_block new_bb;
4733 edge e;
4734 edge_iterator ei;
4736 new_bb = create_empty_bb (bb);
4738 /* Redirect the outgoing edges. */
4739 new_bb->succs = bb->succs;
4740 bb->succs = NULL;
4741 FOR_EACH_EDGE (e, ei, new_bb->succs)
4742 e->src = new_bb;
4744 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4745 stmt = NULL;
4747 /* Move everything from BSI to the new basic block. */
4748 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4750 act = bsi_stmt (bsi);
4751 if (TREE_CODE (act) == LABEL_EXPR)
4752 continue;
4754 if (!stmt)
4755 break;
4757 if (stmt == act)
4759 bsi_next (&bsi);
4760 break;
4764 bsi_tgt = bsi_start (new_bb);
4765 while (!bsi_end_p (bsi))
4767 act = bsi_stmt (bsi);
4768 bsi_remove (&bsi);
4769 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4772 return new_bb;
4776 /* Moves basic block BB after block AFTER. */
4778 static bool
4779 tree_move_block_after (basic_block bb, basic_block after)
4781 if (bb->prev_bb == after)
4782 return true;
4784 unlink_block (bb);
4785 link_block (bb, after);
4787 return true;
4791 /* Return true if basic_block can be duplicated. */
4793 static bool
4794 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4796 return true;
4799 /* Create a duplicate of the basic block BB. NOTE: This does not
4800 preserve SSA form. */
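/* Note that the statements and PHI results in the copy still refer to the
   original SSA names; they are merely marked for rewriting.  Callers such
   as tree_duplicate_sese_region below repair this afterwards using
   allocate_ssa_names and rewrite_to_new_ssa_names.  */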
4802 static basic_block
4803 tree_duplicate_bb (basic_block bb)
4805 basic_block new_bb;
4806 block_stmt_iterator bsi, bsi_tgt;
4807 tree phi, val;
4808 ssa_op_iter op_iter;
4810 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4812 /* First copy the phi nodes. We do not copy phi node arguments here,
4813 since the edges are not ready yet. Keep the chain of phi nodes in
4814 the same order, so that we can add them later. */
4815 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4817 mark_for_rewrite (PHI_RESULT (phi));
4818 create_phi_node (PHI_RESULT (phi), new_bb);
4820 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4822 bsi_tgt = bsi_start (new_bb);
4823 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4825 tree stmt = bsi_stmt (bsi);
4826 tree copy;
4828 if (TREE_CODE (stmt) == LABEL_EXPR)
4829 continue;
4831 /* Record the definitions. */
4832 get_stmt_operands (stmt);
4834 FOR_EACH_SSA_TREE_OPERAND (val, stmt, op_iter, SSA_OP_ALL_DEFS)
4835 mark_for_rewrite (val);
4837 copy = unshare_expr (stmt);
4839 /* Copy also the virtual operands. */
4840 get_stmt_ann (copy);
4841 copy_virtual_operands (copy, stmt);
4843 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4846 return new_bb;
4849 /* Basic block BB_COPY was created by code duplication. Add phi node
4850 arguments for edges going out of BB_COPY. The blocks that were
4851 duplicated have rbi->duplicated set to one. */
4853 void
4854 add_phi_args_after_copy_bb (basic_block bb_copy)
4856 basic_block bb, dest;
4857 edge e, e_copy;
4858 edge_iterator ei;
4859 tree phi, phi_copy, phi_next, def;
4861 bb = bb_copy->rbi->original;
4863 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4865 if (!phi_nodes (e_copy->dest))
4866 continue;
4868 if (e_copy->dest->rbi->duplicated)
4869 dest = e_copy->dest->rbi->original;
4870 else
4871 dest = e_copy->dest;
4873 e = find_edge (bb, dest);
4874 if (!e)
4876 /* During loop unrolling the target of the latch edge is copied.
4877 In this case we are not looking for an edge to DEST, but for
4878 an edge to the duplicated block whose original was DEST. */
4879 FOR_EACH_EDGE (e, ei, bb->succs)
4880 if (e->dest->rbi->duplicated
4881 && e->dest->rbi->original == dest)
4882 break;
4884 gcc_assert (e != NULL);
4887 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4888 phi;
4889 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4891 phi_next = PHI_CHAIN (phi);
4893 gcc_assert (PHI_RESULT (phi) == PHI_RESULT (phi_copy));
4894 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4895 add_phi_arg (phi_copy, def, e_copy);
4900 /* Blocks in REGION_COPY array of length N_REGION were created by
4901 duplication of basic blocks. Add phi node arguments for edges
4902 going from these blocks. */
4904 void
4905 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4907 unsigned i;
4909 for (i = 0; i < n_region; i++)
4910 region_copy[i]->rbi->duplicated = 1;
4912 for (i = 0; i < n_region; i++)
4913 add_phi_args_after_copy_bb (region_copy[i]);
4915 for (i = 0; i < n_region; i++)
4916 region_copy[i]->rbi->duplicated = 0;
4919 /* Maps the old ssa name FROM_NAME to TO_NAME. */
4921 struct ssa_name_map_entry
4923 tree from_name;
4924 tree to_name;
4927 /* Hash function for ssa_name_map_entry. */
4929 static hashval_t
4930 ssa_name_map_entry_hash (const void *entry)
4932 const struct ssa_name_map_entry *en = entry;
4933 return SSA_NAME_VERSION (en->from_name);
4936 /* Equality function for ssa_name_map_entry. */
4938 static int
4939 ssa_name_map_entry_eq (const void *in_table, const void *ssa_name)
4941 const struct ssa_name_map_entry *en = in_table;
4943 return en->from_name == ssa_name;
4946 /* Allocate duplicates of the ssa names whose versions are set in the
4947 bitmap DEFINITIONS, and store the mapping in MAP. */
4949 void
4950 allocate_ssa_names (bitmap definitions, htab_t *map)
4952 tree name;
4953 struct ssa_name_map_entry *entry;
4954 PTR *slot;
4955 unsigned ver;
4956 bitmap_iterator bi;
4958 if (!*map)
4959 *map = htab_create (10, ssa_name_map_entry_hash,
4960 ssa_name_map_entry_eq, free);
4961 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
4963 name = ssa_name (ver);
4964 slot = htab_find_slot_with_hash (*map, name, SSA_NAME_VERSION (name),
4965 INSERT);
4966 if (*slot)
4967 entry = *slot;
4968 else
4970 entry = xmalloc (sizeof (struct ssa_name_map_entry));
4971 entry->from_name = name;
4972 *slot = entry;
4974 entry->to_name = duplicate_ssa_name (name, SSA_NAME_DEF_STMT (name));
4978 /* Rewrite the definition DEF in statement STMT to new ssa name as specified
4979 by the mapping MAP. */
4981 static void
4982 rewrite_to_new_ssa_names_def (def_operand_p def, tree stmt, htab_t map)
4984 tree name = DEF_FROM_PTR (def);
4985 struct ssa_name_map_entry *entry;
4987 gcc_assert (TREE_CODE (name) == SSA_NAME);
4989 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4990 if (!entry)
4991 return;
4993 SET_DEF (def, entry->to_name);
4994 SSA_NAME_DEF_STMT (entry->to_name) = stmt;
4997 /* Rewrite the USE to new ssa name as specified by the mapping MAP. */
4999 static void
5000 rewrite_to_new_ssa_names_use (use_operand_p use, htab_t map)
5002 tree name = USE_FROM_PTR (use);
5003 struct ssa_name_map_entry *entry;
5005 if (TREE_CODE (name) != SSA_NAME)
5006 return;
5008 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
5009 if (!entry)
5010 return;
5012 SET_USE (use, entry->to_name);
5015 /* Rewrite the ssa names in basic block BB to new ones as specified by the
5016 mapping MAP. */
5018 void
5019 rewrite_to_new_ssa_names_bb (basic_block bb, htab_t map)
5021 unsigned i;
5022 edge e;
5023 edge_iterator ei;
5024 tree phi, stmt;
5025 block_stmt_iterator bsi;
5026 use_optype uses;
5027 vuse_optype vuses;
5028 def_optype defs;
5029 v_may_def_optype v_may_defs;
5030 v_must_def_optype v_must_defs;
5031 stmt_ann_t ann;
5033 FOR_EACH_EDGE (e, ei, bb->preds)
5034 if (e->flags & EDGE_ABNORMAL)
5035 break;
5037 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5039 rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi), phi, map);
5040 if (e)
5041 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
5044 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5046 stmt = bsi_stmt (bsi);
5047 get_stmt_operands (stmt);
5048 ann = stmt_ann (stmt);
5050 uses = USE_OPS (ann);
5051 for (i = 0; i < NUM_USES (uses); i++)
5052 rewrite_to_new_ssa_names_use (USE_OP_PTR (uses, i), map);
5054 defs = DEF_OPS (ann);
5055 for (i = 0; i < NUM_DEFS (defs); i++)
5056 rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs, i), stmt, map);
5058 vuses = VUSE_OPS (ann);
5059 for (i = 0; i < NUM_VUSES (vuses); i++)
5060 rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses, i), map);
5062 v_may_defs = V_MAY_DEF_OPS (ann);
5063 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
5065 rewrite_to_new_ssa_names_use
5066 (V_MAY_DEF_OP_PTR (v_may_defs, i), map);
5067 rewrite_to_new_ssa_names_def
5068 (V_MAY_DEF_RESULT_PTR (v_may_defs, i), stmt, map);
5071 v_must_defs = V_MUST_DEF_OPS (ann);
5072 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
5074 rewrite_to_new_ssa_names_def
5075 (V_MUST_DEF_RESULT_PTR (v_must_defs, i), stmt, map);
5076 rewrite_to_new_ssa_names_use
5077 (V_MUST_DEF_KILL_PTR (v_must_defs, i), map);
5081 FOR_EACH_EDGE (e, ei, bb->succs)
5082 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
5084 rewrite_to_new_ssa_names_use
5085 (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), map);
5087 if (e->flags & EDGE_ABNORMAL)
5089 tree op = PHI_ARG_DEF_FROM_EDGE (phi, e);
5090 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op) = 1;
5095 /* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
5096 by the mapping MAP. */
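/* The expected calling sequence, as used by tree_duplicate_sese_region
   below, is roughly:

     allocate_ssa_names (definitions, &map);
     rewrite_to_new_ssa_names (region, n_region, map);
     allocate_ssa_names (definitions, &map);
     rewrite_to_new_ssa_names (region_copy, n_region, map);
     htab_delete (map);

   so that the original region and its copy each receive fresh SSA names
   for the marked definitions.  */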
5098 void
5099 rewrite_to_new_ssa_names (basic_block *region, unsigned n_region, htab_t map)
5101 unsigned r;
5103 for (r = 0; r < n_region; r++)
5104 rewrite_to_new_ssa_names_bb (region[r], map);
5107 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5108 important exit edge EXIT. By important we mean that no SSA name defined
5109 inside region is live over the other exit edges of the region. All entry
5110 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5111 to the duplicate of the region. SSA form, dominance and loop information
5112 is updated. The new basic blocks are stored to REGION_COPY in the same
5113 order as they had in REGION, provided that REGION_COPY is not NULL.
5114 The function returns false if it is unable to copy the region,
5115 true otherwise. */
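/* The primary use (see the copying_header handling below) is loop header
   copying: ENTRY is the edge entering the loop header and EXIT the edge
   from the header that stays inside the loop, so after duplication the
   copied header sits outside the loop and acts as the initial entry
   test.  */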
5117 bool
5118 tree_duplicate_sese_region (edge entry, edge exit,
5119 basic_block *region, unsigned n_region,
5120 basic_block *region_copy)
5122 unsigned i, n_doms, ver;
5123 bool free_region_copy = false, copying_header = false;
5124 struct loop *loop = entry->dest->loop_father;
5125 edge exit_copy;
5126 bitmap definitions;
5127 tree phi;
5128 basic_block *doms;
5129 htab_t ssa_name_map = NULL;
5130 edge redirected;
5131 bitmap_iterator bi;
5133 if (!can_copy_bbs_p (region, n_region))
5134 return false;
5136 /* Some sanity checking. Note that we do not check for all possible
5137 misuses of the functions.  That is, if you ask to copy something weird,
5138 it will work, but the state of the data structures will probably not
5139 be correct. */
5141 for (i = 0; i < n_region; i++)
5143 /* We do not handle subloops, i.e. all the blocks must belong to the
5144 same loop. */
5145 if (region[i]->loop_father != loop)
5146 return false;
5148 if (region[i] != entry->dest
5149 && region[i] == loop->header)
5150 return false;
5153 loop->copy = loop;
5155 /* In case the function is used for loop header copying (which is the primary
5156 use), ensure that EXIT and its copy will be new latch and entry edges. */
5157 if (loop->header == entry->dest)
5159 copying_header = true;
5160 loop->copy = loop->outer;
5162 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5163 return false;
5165 for (i = 0; i < n_region; i++)
5166 if (region[i] != exit->src
5167 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5168 return false;
5171 if (!region_copy)
5173 region_copy = xmalloc (sizeof (basic_block) * n_region);
5174 free_region_copy = true;
5177 gcc_assert (!any_marked_for_rewrite_p ());
5179 /* Record blocks outside the region that are dominated by something
5180 inside. */
5181 doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
5182 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
5184 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
5185 definitions = marked_ssa_names ();
5187 if (copying_header)
5189 loop->header = exit->dest;
5190 loop->latch = exit->src;
5193 /* Redirect the entry and add the phi node arguments. */
5194 redirected = redirect_edge_and_branch (entry, entry->dest->rbi->copy);
5195 gcc_assert (redirected != NULL);
5196 flush_pending_stmts (entry);
5198 /* Concerning the updating of dominators: we must recompute the dominators
5199 for the entry block and its copy. Anything that is outside of the region,
5200 but was dominated by something inside, needs recomputing as well. */
5201 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5202 doms[n_doms++] = entry->dest->rbi->original;
5203 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
5204 free (doms);
5206 /* Add the other phi node arguments. */
5207 add_phi_args_after_copy (region_copy, n_region);
5209 /* Add phi nodes for definitions at exit. TODO -- once we have immediate
5210 uses, it should be possible to emit phi nodes just for definitions that
5211 are used outside region. */
5212 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
5214 tree name = ssa_name (ver);
5216 phi = create_phi_node (name, exit->dest);
5217 add_phi_arg (phi, name, exit);
5218 add_phi_arg (phi, name, exit_copy);
5220 SSA_NAME_DEF_STMT (name) = phi;
5223 /* And create new definitions inside region and its copy. TODO -- once we
5224 have immediate uses, it might be better to leave definitions in region
5225 unchanged, create new ssa names for phi nodes on exit, and rewrite
5226 the uses, to avoid changing the copied region. */
5227 allocate_ssa_names (definitions, &ssa_name_map);
5228 rewrite_to_new_ssa_names (region, n_region, ssa_name_map);
5229 allocate_ssa_names (definitions, &ssa_name_map);
5230 rewrite_to_new_ssa_names (region_copy, n_region, ssa_name_map);
5231 htab_delete (ssa_name_map);
5233 if (free_region_copy)
5234 free (region_copy);
5236 unmark_all_for_rewrite ();
5237 BITMAP_FREE (definitions);
5239 return true;
5242 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h). */
5244 void
5245 dump_function_to_file (tree fn, FILE *file, int flags)
5247 tree arg, vars, var;
5248 bool ignore_topmost_bind = false, any_var = false;
5249 basic_block bb;
5250 tree chain;
5252 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5254 arg = DECL_ARGUMENTS (fn);
5255 while (arg)
5257 print_generic_expr (file, arg, dump_flags);
5258 if (TREE_CHAIN (arg))
5259 fprintf (file, ", ");
5260 arg = TREE_CHAIN (arg);
5262 fprintf (file, ")\n");
5264 if (flags & TDF_RAW)
5266 dump_node (fn, TDF_SLIM | flags, file);
5267 return;
5270 /* When GIMPLE is lowered, the variables are no longer available in
5271 BIND_EXPRs, so display them separately. */
5272 if (cfun && cfun->unexpanded_var_list)
5274 ignore_topmost_bind = true;
5276 fprintf (file, "{\n");
5277 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5279 var = TREE_VALUE (vars);
5281 print_generic_decl (file, var, flags);
5282 fprintf (file, "\n");
5284 any_var = true;
5288 if (basic_block_info)
5290 /* Make a CFG based dump. */
5291 check_bb_profile (ENTRY_BLOCK_PTR, file);
5292 if (!ignore_topmost_bind)
5293 fprintf (file, "{\n");
5295 if (any_var && n_basic_blocks)
5296 fprintf (file, "\n");
5298 FOR_EACH_BB (bb)
5299 dump_generic_bb (file, bb, 2, flags);
5301 fprintf (file, "}\n");
5302 check_bb_profile (EXIT_BLOCK_PTR, file);
5304 else
5306 int indent;
5308 /* Make a tree based dump. */
5309 chain = DECL_SAVED_TREE (fn);
5311 if (TREE_CODE (chain) == BIND_EXPR)
5313 if (ignore_topmost_bind)
5315 chain = BIND_EXPR_BODY (chain);
5316 indent = 2;
5318 else
5319 indent = 0;
5321 else
5323 if (!ignore_topmost_bind)
5324 fprintf (file, "{\n");
5325 indent = 2;
5328 if (any_var)
5329 fprintf (file, "\n");
5331 print_generic_stmt_indented (file, chain, flags, indent);
5332 if (ignore_topmost_bind)
5333 fprintf (file, "}\n");
5336 fprintf (file, "\n\n");
5340 /* Pretty printing of the loops' intermediate representation. */
5341 static void print_loop (FILE *, struct loop *, int);
5342 static void print_pred_bbs (FILE *, basic_block bb);
5343 static void print_succ_bbs (FILE *, basic_block bb);
5346 /* Print on FILE the indexes of the predecessors of basic block BB. */
5348 static void
5349 print_pred_bbs (FILE *file, basic_block bb)
5351 edge e;
5352 edge_iterator ei;
5354 FOR_EACH_EDGE (e, ei, bb->preds)
5355 fprintf (file, "bb_%d", e->src->index);
5359 /* Print on FILE the indexes of the successors of basic block BB. */
5361 static void
5362 print_succ_bbs (FILE *file, basic_block bb)
5364 edge e;
5365 edge_iterator ei;
5367 FOR_EACH_EDGE (e, ei, bb->succs)
5368 fprintf (file, "bb_%d", e->dest->index);
5372 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5374 static void
5375 print_loop (FILE *file, struct loop *loop, int indent)
5377 char *s_indent;
5378 basic_block bb;
5380 if (loop == NULL)
5381 return;
5383 s_indent = (char *) alloca ((size_t) indent + 1);
5384 memset ((void *) s_indent, ' ', (size_t) indent);
5385 s_indent[indent] = '\0';
5387 /* Print the loop's header. */
5388 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5390 /* Print the loop's body. */
5391 fprintf (file, "%s{\n", s_indent);
5392 FOR_EACH_BB (bb)
5393 if (bb->loop_father == loop)
5395 /* Print the basic_block's header. */
5396 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5397 print_pred_bbs (file, bb);
5398 fprintf (file, "}, succs = {");
5399 print_succ_bbs (file, bb);
5400 fprintf (file, "})\n");
5402 /* Print the basic_block's body. */
5403 fprintf (file, "%s {\n", s_indent);
5404 tree_dump_bb (bb, file, indent + 4);
5405 fprintf (file, "%s }\n", s_indent);
5408 print_loop (file, loop->inner, indent + 2);
5409 fprintf (file, "%s}\n", s_indent);
5410 print_loop (file, loop->next, indent);
5414 /* Pretty print on FILE the loop structure of the current function,
5415 starting from its first basic block. */
5417 void
5418 print_loop_ir (FILE *file)
5420 basic_block bb;
5422 bb = BASIC_BLOCK (0);
5423 if (bb && bb->loop_father)
5424 print_loop (file, bb->loop_father, 0);
5428 /* Debugging loops structure at tree level. */
5430 void
5431 debug_loop_ir (void)
5433 print_loop_ir (stderr);
5437 /* Return true if BB ends with a call, possibly followed by some
5438 instructions that must stay with the call. Return false
5439 otherwise. */
5441 static bool
5442 tree_block_ends_with_call_p (basic_block bb)
5444 block_stmt_iterator bsi = bsi_last (bb);
5445 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5449 /* Return true if BB ends with a conditional branch. Return false
5450 otherwise. */
5452 static bool
5453 tree_block_ends_with_condjump_p (basic_block bb)
5455 tree stmt = tsi_stmt (bsi_last (bb).tsi);
5456 return (TREE_CODE (stmt) == COND_EXPR);
5460 /* Return true if we need to add a fake edge to the exit block at statement T.
5461 Helper function for tree_flow_call_edges_add. */
5463 static bool
5464 need_fake_edge_p (tree t)
5466 tree call;
5468 /* NORETURN and LONGJMP calls already have an edge to exit.
5469 CONST and PURE calls do not need one.
5470 We don't currently check for CONST and PURE here, although
5471 it would be a good idea, because those attributes are
5472 figured out from the RTL in mark_constant_function, and
5473 the counter incrementation code from -fprofile-arcs
5474 leads to different results from -fbranch-probabilities. */
5475 call = get_call_expr_in (t);
5476 if (call
5477 && !(call_expr_flags (call) & ECF_NORETURN))
5478 return true;
5480 if (TREE_CODE (t) == ASM_EXPR
5481 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5482 return true;
5484 return false;
5488 /* Add fake edges to the function exit for any non-constant, non-noreturn
5489 calls and volatile inline assembly in the bitmap of blocks specified by
5490 BLOCKS, or in the whole CFG if BLOCKS is zero. Return
5491 the number of blocks that were split.
5493 The goal is to expose cases in which entering a basic block does
5494 not imply that all subsequent instructions must be executed. */
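/* For instance (a sketch), a block

     <bb 3>:  x_1 = foo ();  y_2 = x_1 + 1;

   is split after the call, and the block containing the call gets a fake
   edge to EXIT, because foo () might never return (it could call exit ()
   or longjmp ()), and the profiling machinery must account for that
   path.  */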
5496 static int
5497 tree_flow_call_edges_add (sbitmap blocks)
5499 int i;
5500 int blocks_split = 0;
5501 int last_bb = last_basic_block;
5502 bool check_last_block = false;
5504 if (n_basic_blocks == 0)
5505 return 0;
5507 if (! blocks)
5508 check_last_block = true;
5509 else
5510 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5512 /* In the last basic block, before epilogue generation, there will be
5513 a fallthru edge to EXIT. Special care is required if the last insn
5514 of the last basic block is a call because make_edge folds duplicate
5515 edges, which would result in the fallthru edge also being marked
5516 fake, which would result in the fallthru edge being removed by
5517 remove_fake_edges, which would result in an invalid CFG.
5519 Moreover, we can't elide the outgoing fake edge, since the block
5520 profiler needs to take this into account in order to solve the minimal
5521 spanning tree in the case that the call doesn't return.
5523 Handle this by adding a dummy instruction in a new last basic block. */
5524 if (check_last_block)
5526 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5527 block_stmt_iterator bsi = bsi_last (bb);
5528 tree t = NULL_TREE;
5529 if (!bsi_end_p (bsi))
5530 t = bsi_stmt (bsi);
5532 if (need_fake_edge_p (t))
5534 edge e;
5536 e = find_edge (bb, EXIT_BLOCK_PTR);
5537 if (e)
5539 bsi_insert_on_edge (e, build_empty_stmt ());
5540 bsi_commit_edge_inserts ();
5545 /* Now add fake edges to the function exit for any non-constant
5546 calls, since there is no way that we can determine if they will
5547 return or not... */
5548 for (i = 0; i < last_bb; i++)
5550 basic_block bb = BASIC_BLOCK (i);
5551 block_stmt_iterator bsi;
5552 tree stmt, last_stmt;
5554 if (!bb)
5555 continue;
5557 if (blocks && !TEST_BIT (blocks, i))
5558 continue;
5560 bsi = bsi_last (bb);
5561 if (!bsi_end_p (bsi))
5563 last_stmt = bsi_stmt (bsi);
5566 stmt = bsi_stmt (bsi);
5567 if (need_fake_edge_p (stmt))
5569 edge e;
5570 /* The handling above of the final block before the
5571 epilogue should be enough to verify that there is
5572 no edge to the exit block in CFG already.
5573 Calling make_edge in such case would cause us to
5574 mark that edge as fake and remove it later. */
5575 #ifdef ENABLE_CHECKING
5576 if (stmt == last_stmt)
5578 e = find_edge (bb, EXIT_BLOCK_PTR);
5579 gcc_assert (e == NULL);
5581 #endif
5583 /* Note that the following may create a new basic block
5584 and renumber the existing basic blocks. */
5585 if (stmt != last_stmt)
5587 e = split_block (bb, stmt);
5588 if (e)
5589 blocks_split++;
5591 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5593 bsi_prev (&bsi);
5595 while (!bsi_end_p (bsi));
5599 if (blocks_split)
5600 verify_flow_info ();
5602 return blocks_split;
5605 bool
5606 tree_purge_dead_eh_edges (basic_block bb)
5608 bool changed = false;
5609 edge e;
5610 edge_iterator ei;
5611 tree stmt = last_stmt (bb);
5613 if (stmt && tree_can_throw_internal (stmt))
5614 return false;
5616 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5618 if (e->flags & EDGE_EH)
5620 remove_edge (e);
5621 changed = true;
5623 else
5624 ei_next (&ei);
5627 /* Removal of dead EH edges might change the dominators of blocks
5628 other than the immediate successors. E.g. when bb1 is changed so
5629 that it can no longer throw, and bb1->bb3 and bb1->bb4 are the dead
5630 EH edges purged by this function, in:
5634 1-->2
5635 / \ |
5636 v v |
5637 3-->4 |
5639 --->5
5642 idom(bb5) must be recomputed. For now just free the dominance
5643 info. */
5644 if (changed)
5645 free_dominance_info (CDI_DOMINATORS);
5647 return changed;
5650 bool
5651 tree_purge_all_dead_eh_edges (bitmap blocks)
5653 bool changed = false;
5654 unsigned i;
5655 bitmap_iterator bi;
5657 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5659 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5662 return changed;
5665 /* This function is called whenever a new edge is created or
5666 redirected. */
5668 static void
5669 tree_execute_on_growing_pred (edge e)
5671 basic_block bb = e->dest;
5673 if (phi_nodes (bb))
5674 reserve_phi_args_for_new_edge (bb);
5677 /* This function is called immediately before edge E is removed from
5678 the edge vector E->dest->preds. */
5680 static void
5681 tree_execute_on_shrinking_pred (edge e)
5683 if (phi_nodes (e->dest))
5684 remove_phi_args (e);
5687 struct cfg_hooks tree_cfg_hooks = {
5688 "tree",
5689 tree_verify_flow_info,
5690 tree_dump_bb, /* dump_bb */
5691 create_bb, /* create_basic_block */
5692 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5693 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5694 remove_bb, /* delete_basic_block */
5695 tree_split_block, /* split_block */
5696 tree_move_block_after, /* move_block_after */
5697 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5698 tree_merge_blocks, /* merge_blocks */
5699 tree_predict_edge, /* predict_edge */
5700 tree_predicted_by_p, /* predicted_by_p */
5701 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5702 tree_duplicate_bb, /* duplicate_block */
5703 tree_split_edge, /* split_edge */
5704 tree_make_forwarder_block, /* make_forwarder_block */
5705 NULL, /* tidy_fallthru_edge */
5706 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5707 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5708 tree_flow_call_edges_add, /* flow_call_edges_add */
5709 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5710 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5714 /* Split all critical edges. */
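/* An edge is critical when its source has more than one successor and its
   destination has more than one predecessor.  Splitting such an edge
   inserts an empty block on it, which gives later passes a place to put
   code that must execute only along that particular edge.  */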
5716 static void
5717 split_critical_edges (void)
5719 basic_block bb;
5720 edge e;
5721 edge_iterator ei;
5723 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5724 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5725 mappings around the calls to split_edge. */
5726 start_recording_case_labels ();
5727 FOR_ALL_BB (bb)
5729 FOR_EACH_EDGE (e, ei, bb->succs)
5730 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5732 split_edge (e);
5735 end_recording_case_labels ();
5738 struct tree_opt_pass pass_split_crit_edges =
5740 "crited", /* name */
5741 NULL, /* gate */
5742 split_critical_edges, /* execute */
5743 NULL, /* sub */
5744 NULL, /* next */
5745 0, /* static_pass_number */
5746 TV_TREE_SPLIT_EDGES, /* tv_id */
5747 PROP_cfg, /* properties required */
5748 PROP_no_crit_edges, /* properties_provided */
5749 0, /* properties_destroyed */
5750 0, /* todo_flags_start */
5751 TODO_dump_func, /* todo_flags_finish */
5752 0 /* letter */
5756 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5757 a temporary, register that temporary to be renamed if necessary,
5758 and return the temporary. Put the statements to compute
5759 EXP before the current statement in BSI. */
5761 tree
5762 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5764 tree t, new_stmt, orig_stmt;
5766 if (is_gimple_val (exp))
5767 return exp;
5769 t = make_rename_temp (type, NULL);
5770 new_stmt = build (MODIFY_EXPR, type, t, exp);
5772 orig_stmt = bsi_stmt (*bsi);
5773 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5774 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5776 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5778 return t;
5781 /* Build a ternary operation and gimplify it. Emit code before BSI.
5782 Return the gimple_val holding the result. */
5784 tree
5785 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5786 tree type, tree a, tree b, tree c)
5788 tree ret;
5790 ret = fold (build3 (code, type, a, b, c));
5791 STRIP_NOPS (ret);
5793 return gimplify_val (bsi, type, ret);
5796 /* Build a binary operation and gimplify it. Emit code before BSI.
5797 Return the gimple_val holding the result. */
5799 tree
5800 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5801 tree type, tree a, tree b)
5803 tree ret;
5805 ret = fold (build2 (code, type, a, b));
5806 STRIP_NOPS (ret);
5808 return gimplify_val (bsi, type, ret);
5811 /* Build a unary operation and gimplify it. Emit code before BSI.
5812 Return the gimple_val holding the result. */
5814 tree
5815 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5816 tree a)
5818 tree ret;
5820 ret = fold (build1 (code, type, a));
5821 STRIP_NOPS (ret);
5823 return gimplify_val (bsi, type, ret);
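/* As a usage sketch (assuming BSI, TYPE, A, B and C are in scope), a pass
   that needs "(a + b) * c" as a GIMPLE value in front of the statement at
   *BSI can write:

     tree t1 = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);
     tree t2 = gimplify_build2 (bsi, MULT_EXPR, type, t1, c);

   Each call folds the expression, forces it into a fresh temporary when it
   is not already a GIMPLE value, and emits the computation before the
   current statement.  */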
5828 /* Emit return warnings. */
5830 static void
5831 execute_warn_function_return (void)
5833 #ifdef USE_MAPPED_LOCATION
5834 source_location location;
5835 #else
5836 location_t *locus;
5837 #endif
5838 tree last;
5839 edge e;
5840 edge_iterator ei;
5842 if (warn_missing_noreturn
5843 && !TREE_THIS_VOLATILE (cfun->decl)
5844 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5845 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5846 warning ("%Jfunction might be possible candidate for "
5847 "attribute %<noreturn%>",
5848 cfun->decl);
5850 /* If we have a path to EXIT, then we do return. */
5851 if (TREE_THIS_VOLATILE (cfun->decl)
5852 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5854 #ifdef USE_MAPPED_LOCATION
5855 location = UNKNOWN_LOCATION;
5856 #else
5857 locus = NULL;
5858 #endif
5859 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5861 last = last_stmt (e->src);
5862 if (TREE_CODE (last) == RETURN_EXPR
5863 #ifdef USE_MAPPED_LOCATION
5864 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5865 #else
5866 && (locus = EXPR_LOCUS (last)) != NULL)
5867 #endif
5868 break;
5870 #ifdef USE_MAPPED_LOCATION
5871 if (location == UNKNOWN_LOCATION)
5872 location = cfun->function_end_locus;
5873 warning ("%H%<noreturn%> function does return", &location);
5874 #else
5875 if (!locus)
5876 locus = &cfun->function_end_locus;
5877 warning ("%H%<noreturn%> function does return", locus);
5878 #endif
5881 /* If we see "return;" in some basic block, then we do reach the end
5882 without returning a value. */
5883 else if (warn_return_type
5884 && !TREE_NO_WARNING (cfun->decl)
5885 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5886 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5888 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5890 tree last = last_stmt (e->src);
5891 if (TREE_CODE (last) == RETURN_EXPR
5892 && TREE_OPERAND (last, 0) == NULL)
5894 #ifdef USE_MAPPED_LOCATION
5895 location = EXPR_LOCATION (last);
5896 if (location == UNKNOWN_LOCATION)
5897 location = cfun->function_end_locus;
5898 warning ("%Hcontrol reaches end of non-void function", &location);
5899 #else
5900 locus = EXPR_LOCUS (last);
5901 if (!locus)
5902 locus = &cfun->function_end_locus;
5903 warning ("%Hcontrol reaches end of non-void function", locus);
5904 #endif
5905 TREE_NO_WARNING (cfun->decl) = 1;
5906 break;
5913 /* Given a basic block B which ends with a conditional and has
5914 precisely two successors, determine which of the edges is taken if
5915 the conditional is true and which is taken if the conditional is
5916 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5918 void
5919 extract_true_false_edges_from_block (basic_block b,
5920 edge *true_edge,
5921 edge *false_edge)
5923 edge e = EDGE_SUCC (b, 0);
5925 if (e->flags & EDGE_TRUE_VALUE)
5927 *true_edge = e;
5928 *false_edge = EDGE_SUCC (b, 1);
5930 else
5932 *false_edge = e;
5933 *true_edge = EDGE_SUCC (b, 1);
5937 struct tree_opt_pass pass_warn_function_return =
5939 NULL, /* name */
5940 NULL, /* gate */
5941 execute_warn_function_return, /* execute */
5942 NULL, /* sub */
5943 NULL, /* next */
5944 0, /* static_pass_number */
5945 0, /* tv_id */
5946 PROP_cfg, /* properties_required */
5947 0, /* properties_provided */
5948 0, /* properties_destroyed */
5949 0, /* todo_flags_start */
5950 0, /* todo_flags_finish */
5951 0 /* letter */
5954 #include "gt-tree-cfg.h"