1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "errors.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
52 /* Local declarations. */
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
57 /* Mapping of labels to their associated blocks. This can greatly speed up
58 building of the CFG in code with lots of gotos. */
59 static GTY(()) varray_type label_to_block_map;
61 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
62 which use a particular edge. The CASE_LABEL_EXPRs are chained together
63 via their TREE_CHAIN field, which we clear after we're done with the
64 hash table to prevent problems with duplication of SWITCH_EXPRs.
66 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
67 update the case vector in response to edge redirections.
69 Right now this table is set up and torn down at key points in the
70 compilation process. It would be nice if we could make the table
71 more persistent. The key is getting notification of changes to
72 the CFG (particularly edge removal, creation and redirection). */
74 struct edge_to_cases_elt
76 /* The edge itself. Necessary for hashing and equality tests. */
77 edge e;
79 /* The case labels associated with this edge. We link these up via
80 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
81 when we destroy the hash table. This prevents problems when copying
82 SWITCH_EXPRs. */
83 tree case_labels;
86 static htab_t edge_to_cases;
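/* Illustrative example (hypothetical source, not part of the original
   file): for

	switch (x)
	  {
	  case 1:
	  case 2: goto A;
	  case 3: goto B;
	  default: goto D;
	  }

   the edge to A's block maps to the chain {case 1, case 2}, the edge to
   B's block maps to {case 3}, and the edge to D's block maps to the
   default CASE_LABEL_EXPR.  Redirecting one of these edges then only
   requires rewriting the labels on that edge's chain instead of scanning
   the whole case vector.  */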
88 /* CFG statistics. */
89 struct cfg_stats_d
91 long num_merged_labels;
94 static struct cfg_stats_d cfg_stats;
96 /* Nonzero if we found a computed goto while building basic blocks. */
97 static bool found_computed_goto;
99 /* Basic blocks and flowgraphs. */
100 static basic_block create_bb (void *, void *, basic_block);
101 static void create_block_annotation (basic_block);
102 static void free_blocks_annotations (void);
103 static void clear_blocks_annotations (void);
104 static void make_blocks (tree);
105 static void factor_computed_gotos (void);
107 /* Edges. */
108 static void make_edges (void);
109 static void make_ctrl_stmt_edges (basic_block);
110 static void make_exit_edges (basic_block);
111 static void make_cond_expr_edges (basic_block);
112 static void make_switch_expr_edges (basic_block);
113 static void make_goto_expr_edges (basic_block);
114 static edge tree_redirect_edge_and_branch (edge, basic_block);
115 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
116 static void split_critical_edges (void);
117 static bool remove_fallthru_edge (VEC(edge) *);
119 /* Various helpers. */
120 static inline bool stmt_starts_bb_p (tree, tree);
121 static int tree_verify_flow_info (void);
122 static void tree_make_forwarder_block (edge);
123 static bool tree_forwarder_block_p (basic_block, bool);
124 static void tree_cfg2vcg (FILE *);
126 /* Flowgraph optimization and cleanup. */
127 static void tree_merge_blocks (basic_block, basic_block);
128 static bool tree_can_merge_blocks_p (basic_block, basic_block);
129 static void remove_bb (basic_block);
130 static bool cleanup_control_flow (void);
131 static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
132 static edge find_taken_edge_cond_expr (basic_block, tree);
133 static edge find_taken_edge_switch_expr (basic_block, tree);
134 static tree find_case_label_for_value (tree, tree);
135 static bool phi_alternatives_equal (basic_block, edge, edge);
136 static bool cleanup_forwarder_blocks (void);
139 /*---------------------------------------------------------------------------
140 Create basic blocks
141 ---------------------------------------------------------------------------*/
143 /* Entry point to the CFG builder for trees. TP points to the list of
144 statements to be added to the flowgraph. */
146 static void
147 build_tree_cfg (tree *tp)
149 /* Register specific tree functions. */
150 tree_register_cfg_hooks ();
152 /* Initialize rbi_pool. */
153 alloc_rbi_pool ();
155 /* Initialize the basic block array. */
156 init_flow ();
157 profile_status = PROFILE_ABSENT;
158 n_basic_blocks = 0;
159 last_basic_block = 0;
160 VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
161 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
163 /* Build a mapping of labels to their associated blocks. */
164 VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
165 "label to block map");
167 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
168 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
170 found_computed_goto = 0;
171 make_blocks (*tp);
173 /* Computed gotos are hell to deal with, especially if there are
174 lots of them with a large number of destinations. So we factor
175 them to a common computed goto location before we build the
176 edge list. After we convert back to normal form, we will un-factor
177 the computed gotos since factoring introduces an unwanted jump. */
178 if (found_computed_goto)
179 factor_computed_gotos ();
181 /* Make sure there is always at least one block, even if it's empty. */
182 if (n_basic_blocks == 0)
183 create_empty_bb (ENTRY_BLOCK_PTR);
185 create_block_annotation (ENTRY_BLOCK_PTR);
186 create_block_annotation (EXIT_BLOCK_PTR);
188 /* Adjust the size of the array. */
189 VARRAY_GROW (basic_block_info, n_basic_blocks);
191 /* To speed up statement iterator walks, we first purge dead labels. */
192 cleanup_dead_labels ();
194 /* Group case nodes to reduce the number of edges.
195 We do this after cleaning up dead labels because otherwise we miss
196 a lot of obvious case merging opportunities. */
197 group_case_labels ();
199 /* Create the edges of the flowgraph. */
200 make_edges ();
202 /* Debugging dumps. */
204 /* Write the flowgraph to a VCG file. */
206 int local_dump_flags;
207 FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
208 if (dump_file)
210 tree_cfg2vcg (dump_file);
211 dump_end (TDI_vcg, dump_file);
215 /* Dump a textual representation of the flowgraph. */
216 if (dump_file)
217 dump_tree_cfg (dump_file, dump_flags);
220 static void
221 execute_build_cfg (void)
223 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
226 struct tree_opt_pass pass_build_cfg =
228 "cfg", /* name */
229 NULL, /* gate */
230 execute_build_cfg, /* execute */
231 NULL, /* sub */
232 NULL, /* next */
233 0, /* static_pass_number */
234 TV_TREE_CFG, /* tv_id */
235 PROP_gimple_leh, /* properties_required */
236 PROP_cfg, /* properties_provided */
237 0, /* properties_destroyed */
238 0, /* todo_flags_start */
239 TODO_verify_stmts, /* todo_flags_finish */
240 0 /* letter */
243 /* Search the CFG for any computed gotos. If found, factor them to a
244 common computed goto site. Also record the location of that site so
245 that we can un-factor the gotos after we have converted back to
246 normal form. */
248 static void
249 factor_computed_gotos (void)
251 basic_block bb;
252 tree factored_label_decl = NULL;
253 tree var = NULL;
254 tree factored_computed_goto_label = NULL;
255 tree factored_computed_goto = NULL;
257 /* We know there are one or more computed gotos in this function.
258 Examine the last statement in each basic block to see if the block
259 ends with a computed goto. */
261 FOR_EACH_BB (bb)
263 block_stmt_iterator bsi = bsi_last (bb);
264 tree last;
266 if (bsi_end_p (bsi))
267 continue;
268 last = bsi_stmt (bsi);
270 /* Ignore the computed goto we create when we factor the original
271 computed gotos. */
272 if (last == factored_computed_goto)
273 continue;
275 /* If the last statement is a computed goto, factor it. */
276 if (computed_goto_p (last))
278 tree assignment;
280 /* The first time we find a computed goto we need to create
281 the factored goto block and the variable each original
282 computed goto will use for its goto destination. */
283 if (! factored_computed_goto)
285 basic_block new_bb = create_empty_bb (bb);
286 block_stmt_iterator new_bsi = bsi_start (new_bb);
288 /* Create the destination of the factored goto. Each original
289 computed goto will put its desired destination into this
290 variable and jump to the label we create immediately
291 below. */
292 var = create_tmp_var (ptr_type_node, "gotovar");
294 /* Build a label for the new block which will contain the
295 factored computed goto. */
296 factored_label_decl = create_artificial_label ();
297 factored_computed_goto_label
298 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
299 bsi_insert_after (&new_bsi, factored_computed_goto_label,
300 BSI_NEW_STMT);
302 /* Build our new computed goto. */
303 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
304 bsi_insert_after (&new_bsi, factored_computed_goto,
305 BSI_NEW_STMT);
308 /* Copy the original computed goto's destination into VAR. */
309 assignment = build (MODIFY_EXPR, ptr_type_node,
310 var, GOTO_DESTINATION (last));
311 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
313 /* And re-vector the computed goto to the new destination. */
314 GOTO_DESTINATION (last) = factored_label_decl;
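/* Illustrative sketch (not part of the original source): at the source
   level the factoring above turns each

	goto *p;

   into

	gotovar = p;
	goto <factored>;

   and adds one new block

	<factored>:
	  goto *gotovar;

   "gotovar" is the temporary created above; <factored> stands for the
   compiler-generated artificial label.  Only the factored block then
   needs outgoing edges to every possible label target, instead of every
   original computed goto carrying that full set of edges.  */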
320 /* Create annotations for a single basic block. */
322 static void
323 create_block_annotation (basic_block bb)
325 /* Verify that the tree_annotations field is clear. */
326 gcc_assert (!bb->tree_annotations);
327 bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
331 /* Free the annotations for all the basic blocks. */
333 static void free_blocks_annotations (void)
335 clear_blocks_annotations ();
339 /* Clear the annotations for all the basic blocks. */
341 static void
342 clear_blocks_annotations (void)
344 basic_block bb;
346 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
347 bb->tree_annotations = NULL;
351 /* Build a flowgraph for the statement_list STMT_LIST. */
353 static void
354 make_blocks (tree stmt_list)
356 tree_stmt_iterator i = tsi_start (stmt_list);
357 tree stmt = NULL;
358 bool start_new_block = true;
359 bool first_stmt_of_list = true;
360 basic_block bb = ENTRY_BLOCK_PTR;
362 while (!tsi_end_p (i))
364 tree prev_stmt;
366 prev_stmt = stmt;
367 stmt = tsi_stmt (i);
369 /* If the statement starts a new basic block or if we have determined
370 in a previous pass that we need to create a new block for STMT, do
371 so now. */
372 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
374 if (!first_stmt_of_list)
375 stmt_list = tsi_split_statement_list_before (&i);
376 bb = create_basic_block (stmt_list, NULL, bb);
377 start_new_block = false;
380 /* Now add STMT to BB and create the subgraphs for special statement
381 codes. */
382 set_bb_for_stmt (stmt, bb);
384 if (computed_goto_p (stmt))
385 found_computed_goto = true;
387 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
388 next iteration. */
389 if (stmt_ends_bb_p (stmt))
390 start_new_block = true;
392 tsi_next (&i);
393 first_stmt_of_list = false;
398 /* Create and return a new empty basic block after bb AFTER. */
400 static basic_block
401 create_bb (void *h, void *e, basic_block after)
403 basic_block bb;
405 gcc_assert (!e);
407 /* Create and initialize a new basic block. Since alloc_block uses
408 ggc_alloc_cleared to allocate a basic block, we do not have to
409 clear the newly allocated basic block here. */
410 bb = alloc_block ();
412 bb->index = last_basic_block;
413 bb->flags = BB_NEW;
414 bb->stmt_list = h ? h : alloc_stmt_list ();
416 /* Add the new block to the linked list of blocks. */
417 link_block (bb, after);
419 /* Grow the basic block array if needed. */
420 if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
422 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
423 VARRAY_GROW (basic_block_info, new_size);
426 /* Add the newly created block to the array. */
427 BASIC_BLOCK (last_basic_block) = bb;
429 create_block_annotation (bb);
431 n_basic_blocks++;
432 last_basic_block++;
434 initialize_bb_rbi (bb);
435 return bb;
439 /*---------------------------------------------------------------------------
440 Edge creation
441 ---------------------------------------------------------------------------*/
443 /* Fold COND_EXPR_COND of each COND_EXPR. */
445 static void
446 fold_cond_expr_cond (void)
448 basic_block bb;
450 FOR_EACH_BB (bb)
452 tree stmt = last_stmt (bb);
454 if (stmt
455 && TREE_CODE (stmt) == COND_EXPR)
457 tree cond = fold (COND_EXPR_COND (stmt));
458 if (integer_zerop (cond))
459 COND_EXPR_COND (stmt) = integer_zero_node;
460 else if (integer_onep (cond))
461 COND_EXPR_COND (stmt) = integer_one_node;
466 /* Join all the blocks in the flowgraph. */
468 static void
469 make_edges (void)
471 basic_block bb;
473 /* Create an edge from entry to the first block with executable
474 statements in it. */
475 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
477 /* Traverse the basic block array placing edges. */
478 FOR_EACH_BB (bb)
480 tree first = first_stmt (bb);
481 tree last = last_stmt (bb);
483 if (first)
485 /* Edges for statements that always alter flow control. */
486 if (is_ctrl_stmt (last))
487 make_ctrl_stmt_edges (bb);
489 /* Edges for statements that sometimes alter flow control. */
490 if (is_ctrl_altering_stmt (last))
491 make_exit_edges (bb);
494 /* Finally, if no edges were created above, this is a regular
495 basic block that only needs a fallthru edge. */
496 if (EDGE_COUNT (bb->succs) == 0)
497 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
500 /* We do not care about fake edges, so remove any that the CFG
501 builder inserted for completeness. */
502 remove_fake_exit_edges ();
504 /* Fold COND_EXPR_COND of each COND_EXPR. */
505 fold_cond_expr_cond ();
507 /* Clean up the graph and warn for unreachable code. */
508 cleanup_tree_cfg ();
512 /* Create edges for control statement at basic block BB. */
514 static void
515 make_ctrl_stmt_edges (basic_block bb)
517 tree last = last_stmt (bb);
519 gcc_assert (last);
520 switch (TREE_CODE (last))
522 case GOTO_EXPR:
523 make_goto_expr_edges (bb);
524 break;
526 case RETURN_EXPR:
527 make_edge (bb, EXIT_BLOCK_PTR, 0);
528 break;
530 case COND_EXPR:
531 make_cond_expr_edges (bb);
532 break;
534 case SWITCH_EXPR:
535 make_switch_expr_edges (bb);
536 break;
538 case RESX_EXPR:
539 make_eh_edges (last);
540 /* Yet another NORETURN hack. */
541 if (EDGE_COUNT (bb->succs) == 0)
542 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
543 break;
545 default:
546 gcc_unreachable ();
551 /* Create exit edges for statements in block BB that alter the flow of
552 control. Statements that alter the control flow are 'goto', 'return'
553 and calls to non-returning functions. */
555 static void
556 make_exit_edges (basic_block bb)
558 tree last = last_stmt (bb), op;
560 gcc_assert (last);
561 switch (TREE_CODE (last))
563 case CALL_EXPR:
564 /* If this function receives a nonlocal goto, then we need to
565 make edges from this call site to all the nonlocal goto
566 handlers. */
567 if (TREE_SIDE_EFFECTS (last)
568 && current_function_has_nonlocal_label)
569 make_goto_expr_edges (bb);
571 /* If this statement has reachable exception handlers, then
572 create abnormal edges to them. */
573 make_eh_edges (last);
575 /* Some calls are known not to return. For such calls we create
576 a fake edge.
578 We really need to revamp how we build edges so that it's not
579 such a bloody pain to avoid creating edges for this case since
580 all we do is remove these edges when we're done building the
581 CFG. */
582 if (call_expr_flags (last) & ECF_NORETURN)
584 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
585 return;
588 /* Don't forget the fall-thru edge. */
589 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
590 break;
592 case MODIFY_EXPR:
593 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
594 may have an abnormal edge. Search the RHS for this case and
595 create any required edges. */
596 op = get_call_expr_in (last);
597 if (op && TREE_SIDE_EFFECTS (op)
598 && current_function_has_nonlocal_label)
599 make_goto_expr_edges (bb);
601 make_eh_edges (last);
602 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
603 break;
605 default:
606 gcc_unreachable ();
611 /* Create the edges for a COND_EXPR starting at block BB.
612 At this point, both clauses must contain only simple gotos. */
614 static void
615 make_cond_expr_edges (basic_block bb)
617 tree entry = last_stmt (bb);
618 basic_block then_bb, else_bb;
619 tree then_label, else_label;
621 gcc_assert (entry);
622 gcc_assert (TREE_CODE (entry) == COND_EXPR);
624 /* Entry basic blocks for each component. */
625 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
626 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
627 then_bb = label_to_block (then_label);
628 else_bb = label_to_block (else_label);
630 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
631 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
634 /* Hashing routine for EDGE_TO_CASES. */
636 static hashval_t
637 edge_to_cases_hash (const void *p)
639 edge e = ((struct edge_to_cases_elt *)p)->e;
641 /* Hash on the edge itself (which is a pointer). */
642 return htab_hash_pointer (e);
645 /* Equality routine for EDGE_TO_CASES, edges are unique, so testing
646 for equality is just a pointer comparison. */
648 static int
649 edge_to_cases_eq (const void *p1, const void *p2)
651 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
652 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
654 return e1 == e2;
657 /* Called for each element in the hash table (P) as we delete the
658 edge to cases hash table.
660 Clear all the TREE_CHAINs to prevent problems with copying of
661 SWITCH_EXPRs and structure sharing rules, then free the hash table
662 element. */
664 static void
665 edge_to_cases_cleanup (void *p)
667 struct edge_to_cases_elt *elt = p;
668 tree t, next;
670 for (t = elt->case_labels; t; t = next)
672 next = TREE_CHAIN (t);
673 TREE_CHAIN (t) = NULL;
675 free (p);
678 /* Start recording information mapping edges to case labels. */
680 static void
681 start_recording_case_labels (void)
683 gcc_assert (edge_to_cases == NULL);
685 edge_to_cases = htab_create (37,
686 edge_to_cases_hash,
687 edge_to_cases_eq,
688 edge_to_cases_cleanup);
691 /* Return nonzero if we are recording information for case labels. */
693 static bool
694 recording_case_labels_p (void)
696 return (edge_to_cases != NULL);
699 /* Stop recording information mapping edges to case labels and
700 remove any information we have recorded. */
701 static void
702 end_recording_case_labels (void)
704 htab_delete (edge_to_cases);
705 edge_to_cases = NULL;
708 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
710 static void
711 record_switch_edge (edge e, tree case_label)
713 struct edge_to_cases_elt *elt;
714 void **slot;
716 /* Build a hash table element so we can see if E is already
717 in the table. */
718 elt = xmalloc (sizeof (struct edge_to_cases_elt));
719 elt->e = e;
720 elt->case_labels = case_label;
722 slot = htab_find_slot (edge_to_cases, elt, INSERT);
724 if (*slot == NULL)
726 /* E was not in the hash table. Install E into the hash table. */
727 *slot = (void *)elt;
729 else
731 /* E was already in the hash table. Free ELT as we do not need it
732 anymore. */
733 free (elt);
735 /* Get the entry stored in the hash table. */
736 elt = (struct edge_to_cases_elt *) *slot;
738 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
739 TREE_CHAIN (case_label) = elt->case_labels;
740 elt->case_labels = case_label;
744 /* If we are inside a {start,end}_recording_cases block, then return
745 a chain of CASE_LABEL_EXPRs from T which reference E.
747 Otherwise return NULL. */
749 static tree
750 get_cases_for_edge (edge e, tree t)
752 struct edge_to_cases_elt elt, *elt_p;
753 void **slot;
754 size_t i, n;
755 tree vec;
757 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
758 chains available. Return NULL so the caller can detect this case. */
759 if (!recording_case_labels_p ())
760 return NULL;
762 restart:
763 elt.e = e;
764 elt.case_labels = NULL;
765 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
767 if (slot)
769 elt_p = (struct edge_to_cases_elt *)*slot;
770 return elt_p->case_labels;
773 /* If we did not find E in the hash table, then this must be the first
774 time we have been queried for information about E & T. Add all the
775 elements from T to the hash table then perform the query again. */
777 vec = SWITCH_LABELS (t);
778 n = TREE_VEC_LENGTH (vec);
779 for (i = 0; i < n; i++)
781 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
782 basic_block label_bb = label_to_block (lab);
783 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
785 goto restart;
788 /* Create the edges for a SWITCH_EXPR starting at block BB.
789 At this point, the switch body has been lowered and the
790 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
792 static void
793 make_switch_expr_edges (basic_block bb)
795 tree entry = last_stmt (bb);
796 size_t i, n;
797 tree vec;
799 vec = SWITCH_LABELS (entry);
800 n = TREE_VEC_LENGTH (vec);
802 for (i = 0; i < n; ++i)
804 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
805 basic_block label_bb = label_to_block (lab);
806 make_edge (bb, label_bb, 0);
811 /* Return the basic block holding label DEST. */
813 basic_block
814 label_to_block (tree dest)
816 int uid = LABEL_DECL_UID (dest);
818 /* We would crash hard if faced with an undefined label. Emit a label
819 into the very first basic block instead; this should keep even the
820 dataflow and undefined-variable warnings reasonably accurate. */
821 if ((errorcount || sorrycount) && uid < 0)
823 block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
824 tree stmt;
826 stmt = build1 (LABEL_EXPR, void_type_node, dest);
827 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
828 uid = LABEL_DECL_UID (dest);
830 return VARRAY_BB (label_to_block_map, uid);
834 /* Create edges for a goto statement at block BB. */
836 static void
837 make_goto_expr_edges (basic_block bb)
839 tree goto_t, dest;
840 basic_block target_bb;
841 int for_call;
842 block_stmt_iterator last = bsi_last (bb);
844 goto_t = bsi_stmt (last);
846 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
847 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
848 from a nonlocal goto. */
849 if (TREE_CODE (goto_t) != GOTO_EXPR)
851 dest = error_mark_node;
852 for_call = 1;
854 else
856 dest = GOTO_DESTINATION (goto_t);
857 for_call = 0;
859 /* A GOTO to a local label creates normal edges. */
860 if (simple_goto_p (goto_t))
862 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
863 #ifdef USE_MAPPED_LOCATION
864 e->goto_locus = EXPR_LOCATION (goto_t);
865 #else
866 e->goto_locus = EXPR_LOCUS (goto_t);
867 #endif
868 bsi_remove (&last);
869 return;
872 /* Nothing more to do for nonlocal gotos. */
873 if (TREE_CODE (dest) == LABEL_DECL)
874 return;
876 /* Computed gotos remain. */
879 /* Look for the block starting with the destination label. In the
880 case of a computed goto, make an edge to any label block we find
881 in the CFG. */
882 FOR_EACH_BB (target_bb)
884 block_stmt_iterator bsi;
886 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
888 tree target = bsi_stmt (bsi);
890 if (TREE_CODE (target) != LABEL_EXPR)
891 break;
893 if (
894 /* Computed GOTOs. Make an edge to every label block that has
895 been marked as a potential target for a computed goto. */
896 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
897 /* Nonlocal GOTO target. Make an edge to every label block
898 that has been marked as a potential target for a nonlocal
899 goto. */
900 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
902 make_edge (bb, target_bb, EDGE_ABNORMAL);
903 break;
908 /* Degenerate case of computed goto with no labels. */
909 if (!for_call && EDGE_COUNT (bb->succs) == 0)
910 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
914 /*---------------------------------------------------------------------------
915 Flowgraph analysis
916 ---------------------------------------------------------------------------*/
918 /* Remove unreachable blocks and other miscellaneous clean up work. */
920 bool
921 cleanup_tree_cfg (void)
923 bool retval = false;
925 timevar_push (TV_TREE_CLEANUP_CFG);
927 retval = cleanup_control_flow ();
928 retval |= delete_unreachable_blocks ();
930 /* cleanup_forwarder_blocks can redirect edges out of SWITCH_EXPRs,
931 which can get expensive. So we want to enable recording of edge
932 to CASE_LABEL_EXPR mappings around the call to
933 cleanup_forwarder_blocks. */
934 start_recording_case_labels ();
935 retval |= cleanup_forwarder_blocks ();
936 end_recording_case_labels ();
938 #ifdef ENABLE_CHECKING
939 if (retval)
941 gcc_assert (!cleanup_control_flow ());
942 gcc_assert (!delete_unreachable_blocks ());
943 gcc_assert (!cleanup_forwarder_blocks ());
945 #endif
947 /* Merging the blocks creates no new opportunities for the other
948 optimizations, so do it here. */
949 retval |= merge_seq_blocks ();
951 compact_blocks ();
953 #ifdef ENABLE_CHECKING
954 verify_flow_info ();
955 #endif
956 timevar_pop (TV_TREE_CLEANUP_CFG);
957 return retval;
961 /* Clean up useless labels in basic blocks. This is something we wish
962 to do early because it allows us to group case labels before creating
963 the edges for the CFG, and it speeds up block statement iterators in
964 all passes later on.
965 We only run this pass once; running it more than once is probably not
966 profitable.
968 /* A map from basic block index to the leading label of that block. */
969 static tree *label_for_bb;
971 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
972 static void
973 update_eh_label (struct eh_region *region)
975 tree old_label = get_eh_region_tree_label (region);
976 if (old_label)
978 tree new_label;
979 basic_block bb = label_to_block (old_label);
981 /* ??? After optimizing, there may be EH regions with labels
982 that have already been removed from the function body, so
983 there is no basic block for them. */
984 if (! bb)
985 return;
987 new_label = label_for_bb[bb->index];
988 set_eh_region_tree_label (region, new_label);
992 /* Given LABEL return the first label in the same basic block. */
993 static tree
994 main_block_label (tree label)
996 basic_block bb = label_to_block (label);
998 /* label_to_block may have inserted an undefined label into the chain. */
999 if (!label_for_bb[bb->index])
1000 label_for_bb[bb->index] = label;
1001 return label_for_bb[bb->index];
1004 /* Cleanup redundant labels. This is a three-step process:
1005 1) Find the leading label for each block.
1006 2) Redirect all references to labels to the leading labels.
1007 3) Cleanup all useless labels. */
1009 void
1010 cleanup_dead_labels (void)
1012 basic_block bb;
1013 label_for_bb = xcalloc (last_basic_block, sizeof (tree));
1015 /* Find a suitable label for each block. We use the first user-defined
1016 label if there is one, or otherwise just the first label we see. */
1017 FOR_EACH_BB (bb)
1019 block_stmt_iterator i;
1021 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
1023 tree label, stmt = bsi_stmt (i);
1025 if (TREE_CODE (stmt) != LABEL_EXPR)
1026 break;
1028 label = LABEL_EXPR_LABEL (stmt);
1030 /* If we have not yet seen a label for the current block,
1031 remember this one and see if there are more labels. */
1032 if (! label_for_bb[bb->index])
1034 label_for_bb[bb->index] = label;
1035 continue;
1038 /* If we did see a label for the current block already, but it
1039 is an artificially created label, replace it if the current
1040 label is a user defined label. */
1041 if (! DECL_ARTIFICIAL (label)
1042 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
1044 label_for_bb[bb->index] = label;
1045 break;
1050 /* Now redirect all jumps/branches to the selected label.
1051 First do so for each block ending in a control statement. */
1052 FOR_EACH_BB (bb)
1054 tree stmt = last_stmt (bb);
1055 if (!stmt)
1056 continue;
1058 switch (TREE_CODE (stmt))
1060 case COND_EXPR:
1062 tree true_branch, false_branch;
1064 true_branch = COND_EXPR_THEN (stmt);
1065 false_branch = COND_EXPR_ELSE (stmt);
1067 GOTO_DESTINATION (true_branch)
1068 = main_block_label (GOTO_DESTINATION (true_branch));
1069 GOTO_DESTINATION (false_branch)
1070 = main_block_label (GOTO_DESTINATION (false_branch));
1072 break;
1075 case SWITCH_EXPR:
1077 size_t i;
1078 tree vec = SWITCH_LABELS (stmt);
1079 size_t n = TREE_VEC_LENGTH (vec);
1081 /* Replace all destination labels. */
1082 for (i = 0; i < n; ++i)
1084 tree elt = TREE_VEC_ELT (vec, i);
1085 tree label = main_block_label (CASE_LABEL (elt));
1086 CASE_LABEL (elt) = label;
1088 break;
1091 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1092 remove them until after we've created the CFG edges. */
1093 case GOTO_EXPR:
1094 if (! computed_goto_p (stmt))
1096 GOTO_DESTINATION (stmt)
1097 = main_block_label (GOTO_DESTINATION (stmt));
1098 break;
1101 default:
1102 break;
1106 for_each_eh_region (update_eh_label);
1108 /* Finally, purge dead labels. All user-defined labels and labels that
1109 can be the target of non-local gotos are preserved. */
1110 FOR_EACH_BB (bb)
1112 block_stmt_iterator i;
1113 tree label_for_this_bb = label_for_bb[bb->index];
1115 if (! label_for_this_bb)
1116 continue;
1118 for (i = bsi_start (bb); !bsi_end_p (i); )
1120 tree label, stmt = bsi_stmt (i);
1122 if (TREE_CODE (stmt) != LABEL_EXPR)
1123 break;
1125 label = LABEL_EXPR_LABEL (stmt);
1127 if (label == label_for_this_bb
1128 || ! DECL_ARTIFICIAL (label)
1129 || DECL_NONLOCAL (label))
1130 bsi_next (&i);
1131 else
1132 bsi_remove (&i);
1136 free (label_for_bb);
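/* Illustrative example (hypothetical labels, not part of the original
   source): if a block starts with

	<artificial D.1>:
	user_lab:
	<artificial D.2>:

   then "user_lab" is selected as the leading label because user-defined
   labels are preferred.  Every goto, switch case and EH region label that
   referred to D.1 or D.2 is redirected to "user_lab", after which the two
   artificial labels are deleted.  User-defined and non-local labels are
   never removed.  */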
1139 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1140 and scan the sorted vector of cases. Combine the ones jumping to the
1141 same label.
1142 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1144 void
1145 group_case_labels (void)
1147 basic_block bb;
1149 FOR_EACH_BB (bb)
1151 tree stmt = last_stmt (bb);
1152 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1154 tree labels = SWITCH_LABELS (stmt);
1155 int old_size = TREE_VEC_LENGTH (labels);
1156 int i, j, new_size = old_size;
1157 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1158 tree default_label;
1160 /* The default label is always the last case in a switch
1161 statement after gimplification. */
1162 default_label = CASE_LABEL (default_case);
1164 /* Look for possible opportunities to merge cases.
1165 Ignore the last element of the label vector because it
1166 must be the default case. */
1167 i = 0;
1168 while (i < old_size - 1)
1170 tree base_case, base_label, base_high, type;
1171 base_case = TREE_VEC_ELT (labels, i);
1173 gcc_assert (base_case);
1174 base_label = CASE_LABEL (base_case);
1176 /* Discard cases that have the same destination as the
1177 default case. */
1178 if (base_label == default_label)
1180 TREE_VEC_ELT (labels, i) = NULL_TREE;
1181 i++;
1182 new_size--;
1183 continue;
1186 type = TREE_TYPE (CASE_LOW (base_case));
1187 base_high = CASE_HIGH (base_case) ?
1188 CASE_HIGH (base_case) : CASE_LOW (base_case);
1189 i++;
1190 /* Try to merge case labels. Break out when we reach the end
1191 of the label vector or when we cannot merge the next case
1192 label with the current one. */
1193 while (i < old_size - 1)
1195 tree merge_case = TREE_VEC_ELT (labels, i);
1196 tree merge_label = CASE_LABEL (merge_case);
1197 tree t = int_const_binop (PLUS_EXPR, base_high,
1198 integer_one_node, 1);
1200 /* Merge the cases if they jump to the same place,
1201 and their ranges are consecutive. */
1202 if (merge_label == base_label
1203 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1205 base_high = CASE_HIGH (merge_case) ?
1206 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1207 CASE_HIGH (base_case) = base_high;
1208 TREE_VEC_ELT (labels, i) = NULL_TREE;
1209 new_size--;
1210 i++;
1212 else
1213 break;
1217 /* Compress the case labels in the label vector, and adjust the
1218 length of the vector. */
1219 for (i = 0, j = 0; i < new_size; i++)
1221 while (! TREE_VEC_ELT (labels, j))
1222 j++;
1223 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1225 TREE_VEC_LENGTH (labels) = new_size;
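/* Illustrative example (hypothetical source, not part of the original
   file): given

	switch (x)
	  {
	  case 1: goto A;
	  case 2: goto A;
	  case 3: goto A;
	  case 7: goto D;
	  default: goto D;
	  }

   the three consecutive cases jumping to A are combined into the single
   entry "case 1 ... 3: goto A;", and "case 7" is dropped altogether since
   it shares its destination with the default case.  */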
1230 /* Checks whether we can merge block B into block A. */
1232 static bool
1233 tree_can_merge_blocks_p (basic_block a, basic_block b)
1235 tree stmt;
1236 block_stmt_iterator bsi;
1238 if (EDGE_COUNT (a->succs) != 1)
1239 return false;
1241 if (EDGE_SUCC (a, 0)->flags & EDGE_ABNORMAL)
1242 return false;
1244 if (EDGE_SUCC (a, 0)->dest != b)
1245 return false;
1247 if (EDGE_COUNT (b->preds) > 1)
1248 return false;
1250 if (b == EXIT_BLOCK_PTR)
1251 return false;
1253 /* If A ends with a statement that may throw or must otherwise end a
1254 basic block, we cannot merge the blocks. */
1255 stmt = last_stmt (a);
1256 if (stmt && stmt_ends_bb_p (stmt))
1257 return false;
1259 /* Do not allow a block with only a non-local label to be merged. */
1260 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1261 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1262 return false;
1264 /* B must not have any PHI nodes at its start. Most such degenerate
1265 PHI nodes should be cleaned up by kill_redundant_phi_nodes. */
1266 if (phi_nodes (b))
1267 return false;
1269 /* Do not remove user labels. */
1270 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1272 stmt = bsi_stmt (bsi);
1273 if (TREE_CODE (stmt) != LABEL_EXPR)
1274 break;
1275 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1276 return false;
1279 return true;
1283 /* Merge block B into block A. */
1285 static void
1286 tree_merge_blocks (basic_block a, basic_block b)
1288 block_stmt_iterator bsi;
1289 tree_stmt_iterator last;
1291 if (dump_file)
1292 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1294 /* Ensure that B follows A. */
1295 move_block_after (b, a);
1297 gcc_assert (EDGE_SUCC (a, 0)->flags & EDGE_FALLTHRU);
1298 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1300 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1301 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1303 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1304 bsi_remove (&bsi);
1305 else
1307 set_bb_for_stmt (bsi_stmt (bsi), a);
1308 bsi_next (&bsi);
1312 /* Merge the chains. */
1313 last = tsi_last (a->stmt_list);
1314 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1315 b->stmt_list = NULL;
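/* Illustrative sketch (not part of the original source): when A ends in a
   plain fallthru into B, and B has a single predecessor, no PHI nodes and
   only artificial labels, the merge appends B's statements to A and drops
   B's labels, e.g.

	A:  t = a + b;			A:  t = a + b;
	B:  c = t * 2;		==>	    c = t * 2;

   and B itself can then be discarded.  */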
1319 /* Walk the function tree removing unnecessary statements.
1321 * Empty statement nodes are removed
1323 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1325 * Unnecessary COND_EXPRs are removed
1327 * Some unnecessary BIND_EXPRs are removed
1329 Clearly more work could be done. The trick is doing the analysis
1330 and removal fast enough to be a net improvement in compile times.
1332 Note that when we remove a control structure such as a COND_EXPR
1333 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1334 to ensure we eliminate all the useless code. */
1336 struct rus_data
1338 tree *last_goto;
1339 bool repeat;
1340 bool may_throw;
1341 bool may_branch;
1342 bool has_label;
1345 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1347 static bool
1348 remove_useless_stmts_warn_notreached (tree stmt)
1350 if (EXPR_HAS_LOCATION (stmt))
1352 location_t loc = EXPR_LOCATION (stmt);
1353 if (LOCATION_LINE (loc) > 0)
1355 warning ("%Hwill never be executed", &loc);
1356 return true;
1360 switch (TREE_CODE (stmt))
1362 case STATEMENT_LIST:
1364 tree_stmt_iterator i;
1365 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1366 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1367 return true;
1369 break;
1371 case COND_EXPR:
1372 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1373 return true;
1374 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1375 return true;
1376 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1377 return true;
1378 break;
1380 case TRY_FINALLY_EXPR:
1381 case TRY_CATCH_EXPR:
1382 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1383 return true;
1384 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1385 return true;
1386 break;
1388 case CATCH_EXPR:
1389 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1390 case EH_FILTER_EXPR:
1391 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1392 case BIND_EXPR:
1393 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1395 default:
1396 /* Not a live container. */
1397 break;
1400 return false;
1403 static void
1404 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1406 tree then_clause, else_clause, cond;
1407 bool save_has_label, then_has_label, else_has_label;
1409 save_has_label = data->has_label;
1410 data->has_label = false;
1411 data->last_goto = NULL;
1413 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1415 then_has_label = data->has_label;
1416 data->has_label = false;
1417 data->last_goto = NULL;
1419 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1421 else_has_label = data->has_label;
1422 data->has_label = save_has_label | then_has_label | else_has_label;
1424 then_clause = COND_EXPR_THEN (*stmt_p);
1425 else_clause = COND_EXPR_ELSE (*stmt_p);
1426 cond = fold (COND_EXPR_COND (*stmt_p));
1428 /* If neither arm does anything at all, we can remove the whole IF. */
1429 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1431 *stmt_p = build_empty_stmt ();
1432 data->repeat = true;
1435 /* If there are no reachable statements in an arm, then we can
1436 zap the entire conditional. */
1437 else if (integer_nonzerop (cond) && !else_has_label)
1439 if (warn_notreached)
1440 remove_useless_stmts_warn_notreached (else_clause);
1441 *stmt_p = then_clause;
1442 data->repeat = true;
1444 else if (integer_zerop (cond) && !then_has_label)
1446 if (warn_notreached)
1447 remove_useless_stmts_warn_notreached (then_clause);
1448 *stmt_p = else_clause;
1449 data->repeat = true;
1452 /* Check a couple of simple things on then/else with single stmts. */
1453 else
1455 tree then_stmt = expr_only (then_clause);
1456 tree else_stmt = expr_only (else_clause);
1458 /* Notice branches to a common destination. */
1459 if (then_stmt && else_stmt
1460 && TREE_CODE (then_stmt) == GOTO_EXPR
1461 && TREE_CODE (else_stmt) == GOTO_EXPR
1462 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1464 *stmt_p = then_stmt;
1465 data->repeat = true;
1468 /* If the THEN/ELSE clause merely assigns a value to a variable or
1469 parameter which is already known to contain that value, then
1470 remove the useless THEN/ELSE clause. */
1471 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1473 if (else_stmt
1474 && TREE_CODE (else_stmt) == MODIFY_EXPR
1475 && TREE_OPERAND (else_stmt, 0) == cond
1476 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1477 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1479 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1480 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1481 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1482 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1484 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1485 ? then_stmt : else_stmt);
1486 tree *location = (TREE_CODE (cond) == EQ_EXPR
1487 ? &COND_EXPR_THEN (*stmt_p)
1488 : &COND_EXPR_ELSE (*stmt_p));
1490 if (stmt
1491 && TREE_CODE (stmt) == MODIFY_EXPR
1492 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1493 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1494 *location = alloc_stmt_list ();
1498 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1499 would be re-introduced during lowering. */
1500 data->last_goto = NULL;
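/* Illustrative examples (hypothetical source, not part of the original
   file) of the simplifications above:

	if (p) goto L; else goto L;	becomes		goto L;

	if (x == 0) x = 0; else f ();	becomes		if (x == 0) ; else f ();

   In the second case the then-clause is dropped because it merely assigns
   to "x" the value that "x" is already known to hold on that path.  */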
1504 static void
1505 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1507 bool save_may_branch, save_may_throw;
1508 bool this_may_branch, this_may_throw;
1510 /* Collect may_branch and may_throw information for the body only. */
1511 save_may_branch = data->may_branch;
1512 save_may_throw = data->may_throw;
1513 data->may_branch = false;
1514 data->may_throw = false;
1515 data->last_goto = NULL;
1517 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1519 this_may_branch = data->may_branch;
1520 this_may_throw = data->may_throw;
1521 data->may_branch |= save_may_branch;
1522 data->may_throw |= save_may_throw;
1523 data->last_goto = NULL;
1525 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1527 /* If the body is empty, then we can emit the FINALLY block without
1528 the enclosing TRY_FINALLY_EXPR. */
1529 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1531 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1532 data->repeat = true;
1535 /* If the handler is empty, then we can emit the TRY block without
1536 the enclosing TRY_FINALLY_EXPR. */
1537 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1539 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1540 data->repeat = true;
1543 /* If the body neither throws nor branches, then we can safely
1544 string the TRY and FINALLY blocks together. */
1545 else if (!this_may_branch && !this_may_throw)
1547 tree stmt = *stmt_p;
1548 *stmt_p = TREE_OPERAND (stmt, 0);
1549 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1550 data->repeat = true;
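/* Illustrative sketch in GENERIC terms (not part of the original source):

	try {} finally { cleanup; }		becomes		cleanup;
	try { body; } finally {}		becomes		body;

   and, when the body can neither throw nor branch out of itself,

	try { body; } finally { cleanup; }	becomes		body; cleanup;  */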
1555 static void
1556 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1558 bool save_may_throw, this_may_throw;
1559 tree_stmt_iterator i;
1560 tree stmt;
1562 /* Collect may_throw information for the body only. */
1563 save_may_throw = data->may_throw;
1564 data->may_throw = false;
1565 data->last_goto = NULL;
1567 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1569 this_may_throw = data->may_throw;
1570 data->may_throw = save_may_throw;
1572 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1573 if (!this_may_throw)
1575 if (warn_notreached)
1576 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1577 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1578 data->repeat = true;
1579 return;
1582 /* Process the catch clause specially. We may be able to tell that
1583 no exceptions propagate past this point. */
1585 this_may_throw = true;
1586 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1587 stmt = tsi_stmt (i);
1588 data->last_goto = NULL;
1590 switch (TREE_CODE (stmt))
1592 case CATCH_EXPR:
1593 for (; !tsi_end_p (i); tsi_next (&i))
1595 stmt = tsi_stmt (i);
1596 /* If we catch all exceptions, then the body does not
1597 propagate exceptions past this point. */
1598 if (CATCH_TYPES (stmt) == NULL)
1599 this_may_throw = false;
1600 data->last_goto = NULL;
1601 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1603 break;
1605 case EH_FILTER_EXPR:
1606 if (EH_FILTER_MUST_NOT_THROW (stmt))
1607 this_may_throw = false;
1608 else if (EH_FILTER_TYPES (stmt) == NULL)
1609 this_may_throw = false;
1610 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1611 break;
1613 default:
1614 /* Otherwise this is a cleanup. */
1615 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1617 /* If the cleanup is empty, then we can emit the TRY block without
1618 the enclosing TRY_CATCH_EXPR. */
1619 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1621 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1622 data->repeat = true;
1624 break;
1626 data->may_throw |= this_may_throw;
1630 static void
1631 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1633 tree block;
1635 /* First remove anything underneath the BIND_EXPR. */
1636 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1638 /* If the BIND_EXPR has no variables, then we can pull everything
1639 up one level and remove the BIND_EXPR, unless this is the toplevel
1640 BIND_EXPR for the current function or an inlined function.
1642 When this situation occurs we will want to apply this
1643 optimization again. */
1644 block = BIND_EXPR_BLOCK (*stmt_p);
1645 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1646 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1647 && (! block
1648 || ! BLOCK_ABSTRACT_ORIGIN (block)
1649 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1650 != FUNCTION_DECL)))
1652 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1653 data->repeat = true;
1658 static void
1659 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1661 tree dest = GOTO_DESTINATION (*stmt_p);
1663 data->may_branch = true;
1664 data->last_goto = NULL;
1666 /* Record the last goto expr, so that we can delete it if unnecessary. */
1667 if (TREE_CODE (dest) == LABEL_DECL)
1668 data->last_goto = stmt_p;
1672 static void
1673 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1675 tree label = LABEL_EXPR_LABEL (*stmt_p);
1677 data->has_label = true;
1679 /* We do want to jump across non-local label receiver code. */
1680 if (DECL_NONLOCAL (label))
1681 data->last_goto = NULL;
1683 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1685 *data->last_goto = build_empty_stmt ();
1686 data->repeat = true;
1689 /* ??? Add something here to delete unused labels. */
1693 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
1694 call. This allows us to eliminate redundant or useless
1695 calls to "const" functions.
1697 The gimplifier already does the same operation, but a function may only be
1698 noticed to be const or pure after its calls have been gimplified, so we
1699 need to update the flag here. */
1701 static void
1702 update_call_expr_flags (tree call)
1704 tree decl = get_callee_fndecl (call);
1705 if (!decl)
1706 return;
1707 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1708 TREE_SIDE_EFFECTS (call) = 0;
1709 if (TREE_NOTHROW (decl))
1710 TREE_NOTHROW (call) = 1;
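/* Illustrative example (hypothetical source, not part of the original
   file):

	extern int isqrt (int) __attribute__ ((const));
	...
	isqrt (x);	<-- result unused

   Once "isqrt" is known to be const, TREE_SIDE_EFFECTS is cleared on the
   call here, which allows later passes to delete the useless call.  */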
1714 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1716 void
1717 notice_special_calls (tree t)
1719 int flags = call_expr_flags (t);
1721 if (flags & ECF_MAY_BE_ALLOCA)
1722 current_function_calls_alloca = true;
1723 if (flags & ECF_RETURNS_TWICE)
1724 current_function_calls_setjmp = true;
1728 /* Clear flags set by notice_special_calls. Used by dead code removal
1729 to update the flags. */
1731 void
1732 clear_special_calls (void)
1734 current_function_calls_alloca = false;
1735 current_function_calls_setjmp = false;
1739 static void
1740 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1742 tree t = *tp, op;
1744 switch (TREE_CODE (t))
1746 case COND_EXPR:
1747 remove_useless_stmts_cond (tp, data);
1748 break;
1750 case TRY_FINALLY_EXPR:
1751 remove_useless_stmts_tf (tp, data);
1752 break;
1754 case TRY_CATCH_EXPR:
1755 remove_useless_stmts_tc (tp, data);
1756 break;
1758 case BIND_EXPR:
1759 remove_useless_stmts_bind (tp, data);
1760 break;
1762 case GOTO_EXPR:
1763 remove_useless_stmts_goto (tp, data);
1764 break;
1766 case LABEL_EXPR:
1767 remove_useless_stmts_label (tp, data);
1768 break;
1770 case RETURN_EXPR:
1771 fold_stmt (tp);
1772 data->last_goto = NULL;
1773 data->may_branch = true;
1774 break;
1776 case CALL_EXPR:
1777 fold_stmt (tp);
1778 data->last_goto = NULL;
1779 notice_special_calls (t);
1780 update_call_expr_flags (t);
1781 if (tree_could_throw_p (t))
1782 data->may_throw = true;
1783 break;
1785 case MODIFY_EXPR:
1786 data->last_goto = NULL;
1787 fold_stmt (tp);
1788 op = get_call_expr_in (t);
1789 if (op)
1791 update_call_expr_flags (op);
1792 notice_special_calls (op);
1794 if (tree_could_throw_p (t))
1795 data->may_throw = true;
1796 break;
1798 case STATEMENT_LIST:
1800 tree_stmt_iterator i = tsi_start (t);
1801 while (!tsi_end_p (i))
1803 t = tsi_stmt (i);
1804 if (IS_EMPTY_STMT (t))
1806 tsi_delink (&i);
1807 continue;
1810 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1812 t = tsi_stmt (i);
1813 if (TREE_CODE (t) == STATEMENT_LIST)
1815 tsi_link_before (&i, t, TSI_SAME_STMT);
1816 tsi_delink (&i);
1818 else
1819 tsi_next (&i);
1822 break;
1823 case ASM_EXPR:
1824 fold_stmt (tp);
1825 data->last_goto = NULL;
1826 break;
1828 default:
1829 data->last_goto = NULL;
1830 break;
1834 static void
1835 remove_useless_stmts (void)
1837 struct rus_data data;
1839 clear_special_calls ();
1843 memset (&data, 0, sizeof (data));
1844 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1846 while (data.repeat);
1850 struct tree_opt_pass pass_remove_useless_stmts =
1852 "useless", /* name */
1853 NULL, /* gate */
1854 remove_useless_stmts, /* execute */
1855 NULL, /* sub */
1856 NULL, /* next */
1857 0, /* static_pass_number */
1858 0, /* tv_id */
1859 PROP_gimple_any, /* properties_required */
1860 0, /* properties_provided */
1861 0, /* properties_destroyed */
1862 0, /* todo_flags_start */
1863 TODO_dump_func, /* todo_flags_finish */
1864 0 /* letter */
1868 /* Remove obviously useless statements in basic block BB. */
1870 static void
1871 cfg_remove_useless_stmts_bb (basic_block bb)
1873 block_stmt_iterator bsi;
1874 tree stmt = NULL_TREE;
1875 tree cond, var = NULL_TREE, val = NULL_TREE;
1876 struct var_ann_d *ann;
1878 /* Check whether we come here from a condition, and if so, get the
1879 condition. */
1880 if (EDGE_COUNT (bb->preds) != 1
1881 || !(EDGE_PRED (bb, 0)->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
1882 return;
1884 cond = COND_EXPR_COND (last_stmt (EDGE_PRED (bb, 0)->src));
1886 if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1888 var = cond;
1889 val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
1890 ? boolean_false_node : boolean_true_node);
1892 else if (TREE_CODE (cond) == TRUTH_NOT_EXPR
1893 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1894 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL))
1896 var = TREE_OPERAND (cond, 0);
1897 val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
1898 ? boolean_true_node : boolean_false_node);
1900 else
1902 if (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE)
1903 cond = invert_truthvalue (cond);
1904 if (TREE_CODE (cond) == EQ_EXPR
1905 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1906 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1907 && (TREE_CODE (TREE_OPERAND (cond, 1)) == VAR_DECL
1908 || TREE_CODE (TREE_OPERAND (cond, 1)) == PARM_DECL
1909 || TREE_CONSTANT (TREE_OPERAND (cond, 1))))
1911 var = TREE_OPERAND (cond, 0);
1912 val = TREE_OPERAND (cond, 1);
1914 else
1915 return;
1918 /* Only work for normal local variables. */
1919 ann = var_ann (var);
1920 if (!ann
1921 || ann->may_aliases
1922 || TREE_ADDRESSABLE (var))
1923 return;
1925 if (! TREE_CONSTANT (val))
1927 ann = var_ann (val);
1928 if (!ann
1929 || ann->may_aliases
1930 || TREE_ADDRESSABLE (val))
1931 return;
1934 /* Ignore floating point variables, since floating-point equality (NaN,
1935 signed zeros) makes this substitution unsafe. */
1936 if (FLOAT_TYPE_P (TREE_TYPE (var)))
1937 return;
1939 for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
1941 stmt = bsi_stmt (bsi);
1943 /* If the THEN/ELSE clause merely assigns a value to a variable/parameter
1944 which is already known to contain that value, then remove the useless
1945 THEN/ELSE clause. */
1946 if (TREE_CODE (stmt) == MODIFY_EXPR
1947 && TREE_OPERAND (stmt, 0) == var
1948 && operand_equal_p (val, TREE_OPERAND (stmt, 1), 0))
1950 bsi_remove (&bsi);
1951 continue;
1954 /* Invalidate the var if we encounter something that could modify it.
1955 Likewise for the value it was previously set to. Note that we only
1956 consider values that are either a VAR_DECL or PARM_DECL so we
1957 can test for conflict very simply. */
1958 if (TREE_CODE (stmt) == ASM_EXPR
1959 || (TREE_CODE (stmt) == MODIFY_EXPR
1960 && (TREE_OPERAND (stmt, 0) == var
1961 || TREE_OPERAND (stmt, 0) == val)))
1962 return;
1964 bsi_next (&bsi);
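/* Illustrative example (hypothetical source, not part of the original
   file): in

	if (x == 0)
	  {
	    x = 0;	<-- deleted, x is already known to be 0 here
	    ...
	  }

   the redundant assignment in the guarded block is removed.  The scan of
   the block stops as soon as a statement is seen that might modify "x"
   (or the value it was compared against).  */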
1969 /* A CFG-aware version of remove_useless_stmts. */
1971 void
1972 cfg_remove_useless_stmts (void)
1974 basic_block bb;
1976 #ifdef ENABLE_CHECKING
1977 verify_flow_info ();
1978 #endif
1980 FOR_EACH_BB (bb)
1982 cfg_remove_useless_stmts_bb (bb);
1987 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1989 static void
1990 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1992 tree phi;
1994 /* Since this block is no longer reachable, we can just delete all
1995 of its PHI nodes. */
1996 phi = phi_nodes (bb);
1997 while (phi)
1999 tree next = PHI_CHAIN (phi);
2000 remove_phi_node (phi, NULL_TREE);
2001 phi = next;
2004 /* Remove edges to BB's successors. */
2005 while (EDGE_COUNT (bb->succs) > 0)
2006 remove_edge (EDGE_SUCC (bb, 0));
2010 /* Remove statements of basic block BB. */
2012 static void
2013 remove_bb (basic_block bb)
2015 block_stmt_iterator i;
2016 source_locus loc = 0;
2018 if (dump_file)
2020 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2021 if (dump_flags & TDF_DETAILS)
2023 dump_bb (bb, dump_file, 0);
2024 fprintf (dump_file, "\n");
2028 /* Remove all the instructions in the block. */
2029 for (i = bsi_start (bb); !bsi_end_p (i);)
2031 tree stmt = bsi_stmt (i);
2032 if (TREE_CODE (stmt) == LABEL_EXPR
2033 && FORCED_LABEL (LABEL_EXPR_LABEL (stmt)))
2035 basic_block new_bb = bb->prev_bb;
2036 block_stmt_iterator new_bsi = bsi_start (new_bb);
2038 bsi_remove (&i);
2039 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2041 else
2043 release_defs (stmt);
2045 set_bb_for_stmt (stmt, NULL);
2046 bsi_remove (&i);
2049 /* Don't warn for removed gotos. Gotos are often removed due to
2050 jump threading, thus resulting in bogus warnings. Not great,
2051 since this way we lose warnings for gotos in the original
2052 program that are indeed unreachable. */
2053 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2055 source_locus t;
2057 #ifdef USE_MAPPED_LOCATION
2058 t = EXPR_LOCATION (stmt);
2059 #else
2060 t = EXPR_LOCUS (stmt);
2061 #endif
2062 if (t && LOCATION_LINE (*t) > 0)
2063 loc = t;
2067 /* If requested, give a warning that the first statement in the
2068 block is unreachable. The loop above keeps the first location it
2069 finds while walking the statements, so LOC refers to the first
2070 statement in the block. */
2071 if (warn_notreached && loc)
2072 #ifdef USE_MAPPED_LOCATION
2073 warning ("%Hwill never be executed", &loc);
2074 #else
2075 warning ("%Hwill never be executed", loc);
2076 #endif
2078 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2081 /* A list of all the noreturn calls passed to modify_stmt.
2082 cleanup_control_flow uses it to detect cases where a mid-block
2083 indirect call has been turned into a noreturn call. When this
2084 happens, all the instructions after the call are no longer
2085 reachable and must be deleted as dead. */
2087 VEC(tree) *modified_noreturn_calls;
2089 /* Try to remove superfluous control structures. */
2091 static bool
2092 cleanup_control_flow (void)
2094 basic_block bb;
2095 block_stmt_iterator bsi;
2096 bool retval = false;
2097 tree stmt;
2099 /* Detect cases where a mid-block call is now known not to return. */
2100 while (VEC_length (tree, modified_noreturn_calls))
2102 stmt = VEC_pop (tree, modified_noreturn_calls);
2103 bb = bb_for_stmt (stmt);
2104 if (bb != NULL && last_stmt (bb) != stmt && noreturn_call_p (stmt))
2105 split_block (bb, stmt);
2108 FOR_EACH_BB (bb)
2110 bsi = bsi_last (bb);
2112 if (bsi_end_p (bsi))
2113 continue;
2115 stmt = bsi_stmt (bsi);
2116 if (TREE_CODE (stmt) == COND_EXPR
2117 || TREE_CODE (stmt) == SWITCH_EXPR)
2118 retval |= cleanup_control_expr_graph (bb, bsi);
2120 /* Check for indirect calls that have been turned into
2121 noreturn calls. */
2122 if (noreturn_call_p (stmt) && remove_fallthru_edge (bb->succs))
2124 free_dominance_info (CDI_DOMINATORS);
2125 retval = true;
2128 return retval;
2132 /* Disconnect an unreachable block in the control expression starting
2133 at block BB. */
2135 static bool
2136 cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
2138 edge taken_edge;
2139 bool retval = false;
2140 tree expr = bsi_stmt (bsi), val;
2142 if (EDGE_COUNT (bb->succs) > 1)
2144 edge e;
2145 edge_iterator ei;
2147 switch (TREE_CODE (expr))
2149 case COND_EXPR:
2150 val = COND_EXPR_COND (expr);
2151 break;
2153 case SWITCH_EXPR:
2154 val = SWITCH_COND (expr);
2155 if (TREE_CODE (val) != INTEGER_CST)
2156 return false;
2157 break;
2159 default:
2160 gcc_unreachable ();
2163 taken_edge = find_taken_edge (bb, val);
2164 if (!taken_edge)
2165 return false;
2167 /* Remove all the edges except the one that is always executed. */
2168 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2170 if (e != taken_edge)
2172 taken_edge->probability += e->probability;
2173 taken_edge->count += e->count;
2174 remove_edge (e);
2175 retval = true;
2177 else
2178 ei_next (&ei);
2180 if (taken_edge->probability > REG_BR_PROB_BASE)
2181 taken_edge->probability = REG_BR_PROB_BASE;
2183 else
2184 taken_edge = EDGE_SUCC (bb, 0);
2186 bsi_remove (&bsi);
2187 taken_edge->flags = EDGE_FALLTHRU;
2189 /* We removed some paths from the cfg. */
2190 free_dominance_info (CDI_DOMINATORS);
2192 return retval;
2195 /* Remove any fallthru edge from EV. Return true if an edge was removed. */
2197 static bool
2198 remove_fallthru_edge (VEC(edge) *ev)
2200 edge_iterator ei;
2201 edge e;
2203 FOR_EACH_EDGE (e, ei, ev)
2204 if ((e->flags & EDGE_FALLTHRU) != 0)
2206 remove_edge (e);
2207 return true;
2209 return false;
2212 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2213 predicate VAL, return the edge that will be taken out of the block.
2214 If VAL does not match a unique edge, NULL is returned. */
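/* For example, once constant propagation has folded the condition of a
   COND_EXPR to integer_zero_node, calling this function with that value
   returns the FALSE edge, which cleanup_control_expr_graph then uses to
   prune the dead arm of the branch.  */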
2216 edge
2217 find_taken_edge (basic_block bb, tree val)
2219 tree stmt;
2221 stmt = last_stmt (bb);
2223 gcc_assert (stmt);
2224 gcc_assert (is_ctrl_stmt (stmt));
2225 gcc_assert (val);
2227 if (TREE_CODE (val) != INTEGER_CST)
2228 return NULL;
2230 if (TREE_CODE (stmt) == COND_EXPR)
2231 return find_taken_edge_cond_expr (bb, val);
2233 if (TREE_CODE (stmt) == SWITCH_EXPR)
2234 return find_taken_edge_switch_expr (bb, val);
2236 gcc_unreachable ();
2240 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2241 statement, determine which of the two edges will be taken out of the
2242 block. Return NULL if either edge may be taken. */
2244 static edge
2245 find_taken_edge_cond_expr (basic_block bb, tree val)
2247 edge true_edge, false_edge;
2249 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2251   /* Try to determine which branch of the if () will be taken.
2252 If VAL is a constant but it can't be reduced to a 0 or a 1, then
2253 we don't really know which edge will be taken at runtime. This
2254 may happen when comparing addresses (e.g., if (&var1 == 4)). */
2255 if (integer_nonzerop (val))
2256 return true_edge;
2257 else if (integer_zerop (val))
2258 return false_edge;
2260 gcc_unreachable ();
2264 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2265 statement, determine which edge will be taken out of the block. Return
2266 NULL if any edge may be taken. */
2268 static edge
2269 find_taken_edge_switch_expr (basic_block bb, tree val)
2271 tree switch_expr, taken_case;
2272 basic_block dest_bb;
2273 edge e;
2275 switch_expr = last_stmt (bb);
2276 taken_case = find_case_label_for_value (switch_expr, val);
2277 dest_bb = label_to_block (CASE_LABEL (taken_case));
2279 e = find_edge (bb, dest_bb);
2280 gcc_assert (e);
2281 return e;
2285 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2286 We can make optimal use here of the fact that the case labels are
2287 sorted: We can do a binary search for a case matching VAL. */
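/* An illustrative run: with a (hypothetical) sorted case vector
   { 1, 5 ... 7, 10, default } and VAL == 6, the search starts with
   LOW == -1 and HIGH == n - 1 (the default slot), probes the middle
   element 5 ... 7, sees CASE_LOW <= 6 and CASE_HIGH >= 6, and returns
   that range label.  When no label matches, the loop narrows HIGH - LOW
   to 1 without returning and the default case falls out at the end.  */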
2289 static tree
2290 find_case_label_for_value (tree switch_expr, tree val)
2292 tree vec = SWITCH_LABELS (switch_expr);
2293 size_t low, high, n = TREE_VEC_LENGTH (vec);
2294 tree default_case = TREE_VEC_ELT (vec, n - 1);
2296 for (low = -1, high = n - 1; high - low > 1; )
2298 size_t i = (high + low) / 2;
2299 tree t = TREE_VEC_ELT (vec, i);
2300 int cmp;
2302 /* Cache the result of comparing CASE_LOW and val. */
2303 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2305 if (cmp > 0)
2306 high = i;
2307 else
2308 low = i;
2310 if (CASE_HIGH (t) == NULL)
2312	  /* A single-valued case label.  */
2313 if (cmp == 0)
2314 return t;
2316 else
2318 /* A case range. We can only handle integer ranges. */
2319 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2320 return t;
2324 return default_case;
2328 /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
2329 those alternatives are equal in each of the PHI nodes, then return
2330 true, else return false. */
2332 static bool
2333 phi_alternatives_equal (basic_block dest, edge e1, edge e2)
2335 int n1 = e1->dest_idx;
2336 int n2 = e2->dest_idx;
2337 tree phi;
2339 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
2341 tree val1 = PHI_ARG_DEF (phi, n1);
2342 tree val2 = PHI_ARG_DEF (phi, n2);
2344 gcc_assert (val1 != NULL_TREE);
2345 gcc_assert (val2 != NULL_TREE);
2347 if (!operand_equal_for_phi_arg_p (val1, val2))
2348 return false;
2351 return true;
2355 /*---------------------------------------------------------------------------
2356 Debugging functions
2357 ---------------------------------------------------------------------------*/
2359 /* Dump tree-specific information of block BB to file OUTF. */
2361 void
2362 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2364 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2368 /* Dump a basic block on stderr. */
2370 void
2371 debug_tree_bb (basic_block bb)
2373 dump_bb (bb, stderr, 0);
2377 /* Dump basic block with index N on stderr. */
2379 basic_block
2380 debug_tree_bb_n (int n)
2382 debug_tree_bb (BASIC_BLOCK (n));
2383 return BASIC_BLOCK (n);
2387 /* Dump the CFG on stderr.
2389 FLAGS are the same used by the tree dumping functions
2390 (see TDF_* in tree.h). */
2392 void
2393 debug_tree_cfg (int flags)
2395 dump_tree_cfg (stderr, flags);
2399 /* Dump the program showing basic block boundaries on the given FILE.
2401 FLAGS are the same used by the tree dumping functions (see TDF_* in
2402 tree.h). */
2404 void
2405 dump_tree_cfg (FILE *file, int flags)
2407 if (flags & TDF_DETAILS)
2409 const char *funcname
2410 = lang_hooks.decl_printable_name (current_function_decl, 2);
2412 fputc ('\n', file);
2413 fprintf (file, ";; Function %s\n\n", funcname);
2414 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2415 n_basic_blocks, n_edges, last_basic_block);
2417 brief_dump_cfg (file);
2418 fprintf (file, "\n");
2421 if (flags & TDF_STATS)
2422 dump_cfg_stats (file);
2424 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2428 /* Dump CFG statistics on FILE. */
2430 void
2431 dump_cfg_stats (FILE *file)
2433 static long max_num_merged_labels = 0;
2434 unsigned long size, total = 0;
2435 int n_edges;
2436 basic_block bb;
2437 const char * const fmt_str = "%-30s%-13s%12s\n";
2438 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2439 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2440 const char *funcname
2441 = lang_hooks.decl_printable_name (current_function_decl, 2);
2444 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2446 fprintf (file, "---------------------------------------------------------\n");
2447 fprintf (file, fmt_str, "", " Number of ", "Memory");
2448 fprintf (file, fmt_str, "", " instances ", "used ");
2449 fprintf (file, "---------------------------------------------------------\n");
2451 size = n_basic_blocks * sizeof (struct basic_block_def);
2452 total += size;
2453 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2454 SCALE (size), LABEL (size));
2456 n_edges = 0;
2457 FOR_EACH_BB (bb)
2458 n_edges += EDGE_COUNT (bb->succs);
2459 size = n_edges * sizeof (struct edge_def);
2460 total += size;
2461 fprintf (file, fmt_str_1, "Edges", n_edges, SCALE (size), LABEL (size));
2463 size = n_basic_blocks * sizeof (struct bb_ann_d);
2464 total += size;
2465 fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks,
2466 SCALE (size), LABEL (size));
2468 fprintf (file, "---------------------------------------------------------\n");
2469 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2470 LABEL (total));
2471 fprintf (file, "---------------------------------------------------------\n");
2472 fprintf (file, "\n");
2474 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2475 max_num_merged_labels = cfg_stats.num_merged_labels;
2477 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2478 cfg_stats.num_merged_labels, max_num_merged_labels);
2480 fprintf (file, "\n");
2484 /* Dump CFG statistics on stderr. Keep extern so that it's always
2485 linked in the final executable. */
2487 void
2488 debug_cfg_stats (void)
2490 dump_cfg_stats (stderr);
2494 /* Dump the flowgraph to a .vcg FILE. */
2496 static void
2497 tree_cfg2vcg (FILE *file)
2499 edge e;
2500 edge_iterator ei;
2501 basic_block bb;
2502 const char *funcname
2503 = lang_hooks.decl_printable_name (current_function_decl, 2);
2505 /* Write the file header. */
2506 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2507 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2508 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2510 /* Write blocks and edges. */
2511 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2513 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2514 e->dest->index);
2516 if (e->flags & EDGE_FAKE)
2517 fprintf (file, " linestyle: dotted priority: 10");
2518 else
2519 fprintf (file, " linestyle: solid priority: 100");
2521 fprintf (file, " }\n");
2523 fputc ('\n', file);
2525 FOR_EACH_BB (bb)
2527 enum tree_code head_code, end_code;
2528 const char *head_name, *end_name;
2529 int head_line = 0;
2530 int end_line = 0;
2531 tree first = first_stmt (bb);
2532 tree last = last_stmt (bb);
2534 if (first)
2536 head_code = TREE_CODE (first);
2537 head_name = tree_code_name[head_code];
2538 head_line = get_lineno (first);
2540 else
2541 head_name = "no-statement";
2543 if (last)
2545 end_code = TREE_CODE (last);
2546 end_name = tree_code_name[end_code];
2547 end_line = get_lineno (last);
2549 else
2550 end_name = "no-statement";
2552 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2553 bb->index, bb->index, head_name, head_line, end_name,
2554 end_line);
2556 FOR_EACH_EDGE (e, ei, bb->succs)
2558 if (e->dest == EXIT_BLOCK_PTR)
2559 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2560 else
2561 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2563 if (e->flags & EDGE_FAKE)
2564 fprintf (file, " priority: 10 linestyle: dotted");
2565 else
2566 fprintf (file, " priority: 100 linestyle: solid");
2568 fprintf (file, " }\n");
2571 if (bb->next_bb != EXIT_BLOCK_PTR)
2572 fputc ('\n', file);
2575 fputs ("}\n\n", file);
2580 /*---------------------------------------------------------------------------
2581 Miscellaneous helpers
2582 ---------------------------------------------------------------------------*/
2584 /* Return true if T represents a stmt that always transfers control. */
2586 bool
2587 is_ctrl_stmt (tree t)
2589 return (TREE_CODE (t) == COND_EXPR
2590 || TREE_CODE (t) == SWITCH_EXPR
2591 || TREE_CODE (t) == GOTO_EXPR
2592 || TREE_CODE (t) == RETURN_EXPR
2593 || TREE_CODE (t) == RESX_EXPR);
2597 /* Return true if T is a statement that may alter the flow of control
2598 (e.g., a call to a non-returning function). */
2600 bool
2601 is_ctrl_altering_stmt (tree t)
2603 tree call;
2605 gcc_assert (t);
2606 call = get_call_expr_in (t);
2607 if (call)
2609 /* A non-pure/const CALL_EXPR alters flow control if the current
2610 function has nonlocal labels. */
2611 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2612 return true;
2614 /* A CALL_EXPR also alters control flow if it does not return. */
2615 if (call_expr_flags (call) & ECF_NORETURN)
2616 return true;
2619 /* If a statement can throw, it alters control flow. */
2620 return tree_can_throw_internal (t);
2624 /* Return true if T is a computed goto. */
2626 bool
2627 computed_goto_p (tree t)
2629 return (TREE_CODE (t) == GOTO_EXPR
2630 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2634 /* Checks whether EXPR is a simple local goto. */
2636 bool
2637 simple_goto_p (tree expr)
2639 return (TREE_CODE (expr) == GOTO_EXPR
2640 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2644 /* Return true if T should start a new basic block. PREV_T is the
2645 statement preceding T. It is used when T is a label or a case label.
2646 Labels should only start a new basic block if their previous statement
2647      wasn't a label.  Otherwise, a sequence of labels would generate
2648 unnecessary basic blocks that only contain a single label. */
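/* For instance, in the (illustrative) sequence

	<L1>:;
	<L2>:;
	a = 1;

   only <L1> starts a new basic block; <L2> is merged into the same block
   and counted in cfg_stats.num_merged_labels.  Nonlocal and forced labels
   are the exception and always get a block of their own.  */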
2650 static inline bool
2651 stmt_starts_bb_p (tree t, tree prev_t)
2653 if (t == NULL_TREE)
2654 return false;
2656 /* LABEL_EXPRs start a new basic block only if the preceding
2657 statement wasn't a label of the same type. This prevents the
2658 creation of consecutive blocks that have nothing but a single
2659 label. */
2660 if (TREE_CODE (t) == LABEL_EXPR)
2662 /* Nonlocal and computed GOTO targets always start a new block. */
2663 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2664 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2665 return true;
2667 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2669 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2670 return true;
2672 cfg_stats.num_merged_labels++;
2673 return false;
2675 else
2676 return true;
2679 return false;
2683 /* Return true if T should end a basic block. */
2685 bool
2686 stmt_ends_bb_p (tree t)
2688 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2692 /* Add gotos that used to be represented implicitly in the CFG. */
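/* While the CFG is present, fallthru control flow needs no explicit
   GOTO_EXPRs; the edges carry that information.  Before the statement
   list is used without the CFG (for instance when expanding to RTL),
   this function emits a goto for every fallthru edge whose destination is
   not the textually following block, and conversely drops the goto from a
   COND_EXPR arm, and a trailing RETURN_EXPR, when control can simply fall
   through.  */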
2694 void
2695 disband_implicit_edges (void)
2697 basic_block bb;
2698 block_stmt_iterator last;
2699 edge e;
2700 edge_iterator ei;
2701 tree stmt, label;
2703 FOR_EACH_BB (bb)
2705 last = bsi_last (bb);
2706 stmt = last_stmt (bb);
2708 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2710	  /* Remove superfluous gotos from COND_EXPR branches.  This was
2711	     moved here from cfg_remove_useless_stmts, where it violated the
2712	     invariants of the tree/CFG correspondence; it fits better here,
2713	     where we disband implicit edges anyway.  */
2714 e = find_edge (bb, bb->next_bb);
2715 if (e)
2717 if (e->flags & EDGE_TRUE_VALUE)
2718 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2719 else if (e->flags & EDGE_FALSE_VALUE)
2720 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2721 else
2722 gcc_unreachable ();
2723 e->flags |= EDGE_FALLTHRU;
2726 continue;
2729 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2731	  /* Remove the RETURN_EXPR if we may fall through to the exit
2732	     instead.  */
2733 gcc_assert (EDGE_COUNT (bb->succs) == 1);
2734 gcc_assert (EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR);
2736 if (bb->next_bb == EXIT_BLOCK_PTR
2737 && !TREE_OPERAND (stmt, 0))
2739 bsi_remove (&last);
2740 EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
2742 continue;
2745 /* There can be no fallthru edge if the last statement is a control
2746 one. */
2747 if (stmt && is_ctrl_stmt (stmt))
2748 continue;
2750 /* Find a fallthru edge and emit the goto if necessary. */
2751 FOR_EACH_EDGE (e, ei, bb->succs)
2752 if (e->flags & EDGE_FALLTHRU)
2753 break;
2755 if (!e || e->dest == bb->next_bb)
2756 continue;
2758 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2759 label = tree_block_label (e->dest);
2761 stmt = build1 (GOTO_EXPR, void_type_node, label);
2762 #ifdef USE_MAPPED_LOCATION
2763 SET_EXPR_LOCATION (stmt, e->goto_locus);
2764 #else
2765 SET_EXPR_LOCUS (stmt, e->goto_locus);
2766 #endif
2767 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2768 e->flags &= ~EDGE_FALLTHRU;
2772 /* Remove block annotations and other data structures.  */
2774 void
2775 delete_tree_cfg_annotations (void)
2777 basic_block bb;
2778 if (n_basic_blocks > 0)
2779 free_blocks_annotations ();
2781 label_to_block_map = NULL;
2782 free_rbi_pool ();
2783 FOR_EACH_BB (bb)
2784 bb->rbi = NULL;
2788 /* Return the first statement in basic block BB. */
2790 tree
2791 first_stmt (basic_block bb)
2793 block_stmt_iterator i = bsi_start (bb);
2794 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2798 /* Return the last statement in basic block BB. */
2800 tree
2801 last_stmt (basic_block bb)
2803 block_stmt_iterator b = bsi_last (bb);
2804 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2808 /* Return a pointer to the last statement in block BB. */
2810 tree *
2811 last_stmt_ptr (basic_block bb)
2813 block_stmt_iterator last = bsi_last (bb);
2814 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2818 /* Return the last statement of an otherwise empty block. Return NULL
2819 if the block is totally empty, or if it contains more than one
2820 statement. */
2822 tree
2823 last_and_only_stmt (basic_block bb)
2825 block_stmt_iterator i = bsi_last (bb);
2826 tree last, prev;
2828 if (bsi_end_p (i))
2829 return NULL_TREE;
2831 last = bsi_stmt (i);
2832 bsi_prev (&i);
2833 if (bsi_end_p (i))
2834 return last;
2836 /* Empty statements should no longer appear in the instruction stream.
2837 Everything that might have appeared before should be deleted by
2838 remove_useless_stmts, and the optimizers should just bsi_remove
2839 instead of smashing with build_empty_stmt.
2841 Thus the only thing that should appear here in a block containing
2842 one executable statement is a label. */
2843 prev = bsi_stmt (i);
2844 if (TREE_CODE (prev) == LABEL_EXPR)
2845 return last;
2846 else
2847 return NULL_TREE;
2851 /* Mark BB as the basic block holding statement T. */
2853 void
2854 set_bb_for_stmt (tree t, basic_block bb)
2856 if (TREE_CODE (t) == PHI_NODE)
2857 PHI_BB (t) = bb;
2858 else if (TREE_CODE (t) == STATEMENT_LIST)
2860 tree_stmt_iterator i;
2861 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2862 set_bb_for_stmt (tsi_stmt (i), bb);
2864 else
2866 stmt_ann_t ann = get_stmt_ann (t);
2867 ann->bb = bb;
2869      /* If the statement is a label, add the label to the block-to-labels
2870	 map so that we can speed up edge creation for GOTO_EXPRs.  */
2871 if (TREE_CODE (t) == LABEL_EXPR)
2873 int uid;
2875 t = LABEL_EXPR_LABEL (t);
2876 uid = LABEL_DECL_UID (t);
2877 if (uid == -1)
2879 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2880 if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
2881 VARRAY_GROW (label_to_block_map, 3 * uid / 2);
2883 else
2884 /* We're moving an existing label. Make sure that we've
2885 removed it from the old block. */
2886 gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
2887 VARRAY_BB (label_to_block_map, uid) = bb;
2892 /* Finds iterator for STMT. */
2894 extern block_stmt_iterator
2895 bsi_for_stmt (tree stmt)
2897 block_stmt_iterator bsi;
2899 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2900 if (bsi_stmt (bsi) == stmt)
2901 return bsi;
2903 gcc_unreachable ();
2906 /* Insert statement (or statement list) T before the statement
2907 pointed-to by iterator I. M specifies how to update iterator I
2908 after insertion (see enum bsi_iterator_update). */
2910 void
2911 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2913 set_bb_for_stmt (t, i->bb);
2914 tsi_link_before (&i->tsi, t, m);
2915 modify_stmt (t);
2919 /* Insert statement (or statement list) T after the statement
2920 pointed-to by iterator I. M specifies how to update iterator I
2921 after insertion (see enum bsi_iterator_update). */
2923 void
2924 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2926 set_bb_for_stmt (t, i->bb);
2927 tsi_link_after (&i->tsi, t, m);
2928 modify_stmt (t);
2932 /* Remove the statement pointed to by iterator I. The iterator is updated
2933 to the next statement. */
2935 void
2936 bsi_remove (block_stmt_iterator *i)
2938 tree t = bsi_stmt (*i);
2939 set_bb_for_stmt (t, NULL);
2940 tsi_delink (&i->tsi);
2944 /* Move the statement at FROM so it comes right after the statement at TO. */
2946 void
2947 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2949 tree stmt = bsi_stmt (*from);
2950 bsi_remove (from);
2951 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2955 /* Move the statement at FROM so it comes right before the statement at TO. */
2957 void
2958 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2960 tree stmt = bsi_stmt (*from);
2961 bsi_remove (from);
2962 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2966 /* Move the statement at FROM to the end of basic block BB. */
2968 void
2969 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2971 block_stmt_iterator last = bsi_last (bb);
2973 /* Have to check bsi_end_p because it could be an empty block. */
2974 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2975 bsi_move_before (from, &last);
2976 else
2977 bsi_move_after (from, &last);
2981 /* Replace the contents of the statement pointed to by iterator BSI
2982 with STMT. If PRESERVE_EH_INFO is true, the exception handling
2983 information of the original statement is preserved. */
2985 void
2986 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
2988 int eh_region;
2989 tree orig_stmt = bsi_stmt (*bsi);
2991 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2992 set_bb_for_stmt (stmt, bsi->bb);
2994 /* Preserve EH region information from the original statement, if
2995 requested by the caller. */
2996 if (preserve_eh_info)
2998 eh_region = lookup_stmt_eh_region (orig_stmt);
2999 if (eh_region >= 0)
3000 add_stmt_to_eh_region (stmt, eh_region);
3003 *bsi_stmt_ptr (*bsi) = stmt;
3004 modify_stmt (stmt);
3008 /* Insert the statement pointed-to by BSI into edge E. Every attempt
3009 is made to place the statement in an existing basic block, but
3010 sometimes that isn't possible. When it isn't possible, the edge is
3011 split and the statement is added to the new block.
3013 In all cases, the returned *BSI points to the correct location. The
3014 return value is true if insertion should be done after the location,
3015      or false if it should be done before the location.  If a new basic
3016      block has to be created, it is stored in *NEW_BB.  */
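/* The insertion point is chosen in this order: (1) if E's destination
   has a single predecessor, no PHI nodes and is not the exit block,
   insert at the start of the destination, after any leading labels;
   (2) if E is not abnormal, its source has a single successor, and the
   source's last statement does not end the block or ends it with a
   RETURN_EXPR, insert at the end of the source (just before the return,
   decomposing its value first if necessary); (3) otherwise split E and
   insert into the newly created block.  */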
3018 static bool
3019 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
3020 basic_block *new_bb)
3022 basic_block dest, src;
3023 tree tmp;
3025 dest = e->dest;
3026 restart:
3028 /* If the destination has one predecessor which has no PHI nodes,
3029 insert there. Except for the exit block.
3031 The requirement for no PHI nodes could be relaxed. Basically we
3032 would have to examine the PHIs to prove that none of them used
3033 the value set by the statement we want to insert on E. That
3034 hardly seems worth the effort. */
3035 if (EDGE_COUNT (dest->preds) == 1
3036 && ! phi_nodes (dest)
3037 && dest != EXIT_BLOCK_PTR)
3039 *bsi = bsi_start (dest);
3040 if (bsi_end_p (*bsi))
3041 return true;
3043 /* Make sure we insert after any leading labels. */
3044 tmp = bsi_stmt (*bsi);
3045 while (TREE_CODE (tmp) == LABEL_EXPR)
3047 bsi_next (bsi);
3048 if (bsi_end_p (*bsi))
3049 break;
3050 tmp = bsi_stmt (*bsi);
3053 if (bsi_end_p (*bsi))
3055 *bsi = bsi_last (dest);
3056 return true;
3058 else
3059 return false;
3062 /* If the source has one successor, the edge is not abnormal and
3063 the last statement does not end a basic block, insert there.
3064 Except for the entry block. */
3065 src = e->src;
3066 if ((e->flags & EDGE_ABNORMAL) == 0
3067 && EDGE_COUNT (src->succs) == 1
3068 && src != ENTRY_BLOCK_PTR)
3070 *bsi = bsi_last (src);
3071 if (bsi_end_p (*bsi))
3072 return true;
3074 tmp = bsi_stmt (*bsi);
3075 if (!stmt_ends_bb_p (tmp))
3076 return true;
3078      /* Insert code just before returning the value.  We may need to
3079	 decompose the return if it contains a non-trivial operand.  */
3080 if (TREE_CODE (tmp) == RETURN_EXPR)
3082 tree op = TREE_OPERAND (tmp, 0);
3083 if (!is_gimple_val (op))
3085 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3086 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3087 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3089 bsi_prev (bsi);
3090 return true;
3094 /* Otherwise, create a new basic block, and split this edge. */
3095 dest = split_edge (e);
3096 if (new_bb)
3097 *new_bb = dest;
3098 e = EDGE_PRED (dest, 0);
3099 goto restart;
3103 /* This routine will commit all pending edge insertions, creating any new
3104 basic blocks which are necessary. */
3106 void
3107 bsi_commit_edge_inserts (void)
3109 basic_block bb;
3110 edge e;
3111 edge_iterator ei;
3113 bsi_commit_one_edge_insert (EDGE_SUCC (ENTRY_BLOCK_PTR, 0), NULL);
3115 FOR_EACH_BB (bb)
3116 FOR_EACH_EDGE (e, ei, bb->succs)
3117 bsi_commit_one_edge_insert (e, NULL);
3121 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3122 to this block, otherwise set it to NULL. */
3124 void
3125 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3127 if (new_bb)
3128 *new_bb = NULL;
3129 if (PENDING_STMT (e))
3131 block_stmt_iterator bsi;
3132 tree stmt = PENDING_STMT (e);
3134 PENDING_STMT (e) = NULL_TREE;
3136 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3137 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3138 else
3139 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3144 /* Add STMT to the pending list of edge E. No actual insertion is
3145 made until a call to bsi_commit_edge_inserts () is made. */
3147 void
3148 bsi_insert_on_edge (edge e, tree stmt)
3150 append_to_statement_list (stmt, &PENDING_STMT (e));
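/* The usual pattern is to queue any number of statements on edges with
   bsi_insert_on_edge and then flush them all with a single call to
   bsi_commit_edge_inserts.  bsi_insert_on_edge_immediate below performs
   the insertion (and any necessary edge split) right away instead.  */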
3153 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3154 block has to be created, it is returned. */
3156 basic_block
3157 bsi_insert_on_edge_immediate (edge e, tree stmt)
3159 block_stmt_iterator bsi;
3160 basic_block new_bb = NULL;
3162 gcc_assert (!PENDING_STMT (e));
3164 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3165 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3166 else
3167 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3169 return new_bb;
3172 /*---------------------------------------------------------------------------
3173 Tree specific functions for CFG manipulation
3174 ---------------------------------------------------------------------------*/
3176 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3178 static void
3179 reinstall_phi_args (edge new_edge, edge old_edge)
3181 tree var, phi;
3183 if (!PENDING_STMT (old_edge))
3184 return;
3186 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3187 var && phi;
3188 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3190 tree result = TREE_PURPOSE (var);
3191 tree arg = TREE_VALUE (var);
3193 gcc_assert (result == PHI_RESULT (phi));
3195 add_phi_arg (phi, arg, new_edge);
3198 PENDING_STMT (old_edge) = NULL;
3201 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3202 Abort on abnormal edges. */
3204 static basic_block
3205 tree_split_edge (edge edge_in)
3207 basic_block new_bb, after_bb, dest, src;
3208 edge new_edge, e;
3210 /* Abnormal edges cannot be split. */
3211 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3213 src = edge_in->src;
3214 dest = edge_in->dest;
3216 /* Place the new block in the block list. Try to keep the new block
3217 near its "logical" location. This is of most help to humans looking
3218 at debugging dumps. */
3219 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3220 after_bb = edge_in->src;
3221 else
3222 after_bb = dest->prev_bb;
3224 new_bb = create_empty_bb (after_bb);
3225 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3226 new_bb->count = edge_in->count;
3227 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3228 new_edge->probability = REG_BR_PROB_BASE;
3229 new_edge->count = edge_in->count;
3231 e = redirect_edge_and_branch (edge_in, new_bb);
3232 gcc_assert (e);
3233 reinstall_phi_args (new_edge, e);
3235 return new_bb;
3239 /* Return true when BB has label LABEL in it. */
3241 static bool
3242 has_label_p (basic_block bb, tree label)
3244 block_stmt_iterator bsi;
3246 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3248 tree stmt = bsi_stmt (bsi);
3250 if (TREE_CODE (stmt) != LABEL_EXPR)
3251 return false;
3252 if (LABEL_EXPR_LABEL (stmt) == label)
3253 return true;
3255 return false;
3259 /* Callback for walk_tree, check that all elements with address taken are
3260   properly noticed as such.  DATA is non-null iff TP was seen inside
3261   a PHI node.  */
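/* As with any walk_tree callback, returning a non-NULL tree stops the
   walk; the value returned here is the offending subtree, which the
   callers in verify_stmt and verify_stmts dump via debug_generic_stmt
   before reporting the failure.  */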
3263 static tree
3264 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3266 tree t = *tp, x;
3267 bool in_phi = (data != NULL);
3269 if (TYPE_P (t))
3270 *walk_subtrees = 0;
3272 /* Check operand N for being valid GIMPLE and give error MSG if not.
3273 We check for constants explicitly since they are not considered
3274 gimple invariants if they overflowed. */
3275 #define CHECK_OP(N, MSG) \
3276 do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N)) \
3277 && !is_gimple_val (TREE_OPERAND (t, N))) \
3278 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3280 switch (TREE_CODE (t))
3282 case SSA_NAME:
3283 if (SSA_NAME_IN_FREE_LIST (t))
3285 error ("SSA name in freelist but still referenced");
3286 return *tp;
3288 break;
3290 case MODIFY_EXPR:
3291 x = TREE_OPERAND (t, 0);
3292 if (TREE_CODE (x) == BIT_FIELD_REF
3293 && is_gimple_reg (TREE_OPERAND (x, 0)))
3295 error ("GIMPLE register modified with BIT_FIELD_REF");
3296 return t;
3298 break;
3300 case ADDR_EXPR:
3301 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3302 dead PHIs that take the address of something. But if the PHI
3303 result is dead, the fact that it takes the address of anything
3304	 is irrelevant.  Because we cannot tell from here whether a PHI result
3305 is dead, we just skip this check for PHIs altogether. This means
3306 we may be missing "valid" checks, but what can you do?
3307 This was PR19217. */
3308 if (in_phi)
3309 break;
3311 /* Skip any references (they will be checked when we recurse down the
3312 tree) and ensure that any variable used as a prefix is marked
3313 addressable. */
3314 for (x = TREE_OPERAND (t, 0);
3315 handled_component_p (x);
3316 x = TREE_OPERAND (x, 0))
3319 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3320 return NULL;
3321 if (!TREE_ADDRESSABLE (x))
3323 error ("address taken, but ADDRESSABLE bit not set");
3324 return x;
3326 break;
3328 case COND_EXPR:
3329 x = COND_EXPR_COND (t);
3330 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3332 error ("non-boolean used in condition");
3333 return x;
3335 break;
3337 case NOP_EXPR:
3338 case CONVERT_EXPR:
3339 case FIX_TRUNC_EXPR:
3340 case FIX_CEIL_EXPR:
3341 case FIX_FLOOR_EXPR:
3342 case FIX_ROUND_EXPR:
3343 case FLOAT_EXPR:
3344 case NEGATE_EXPR:
3345 case ABS_EXPR:
3346 case BIT_NOT_EXPR:
3347 case NON_LVALUE_EXPR:
3348 case TRUTH_NOT_EXPR:
3349 CHECK_OP (0, "Invalid operand to unary operator");
3350 break;
3352 case REALPART_EXPR:
3353 case IMAGPART_EXPR:
3354 case COMPONENT_REF:
3355 case ARRAY_REF:
3356 case ARRAY_RANGE_REF:
3357 case BIT_FIELD_REF:
3358 case VIEW_CONVERT_EXPR:
3359 /* We have a nest of references. Verify that each of the operands
3360 that determine where to reference is either a constant or a variable,
3361 verify that the base is valid, and then show we've already checked
3362 the subtrees. */
3363 while (handled_component_p (t))
3365 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3366 CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
3367 else if (TREE_CODE (t) == ARRAY_REF
3368 || TREE_CODE (t) == ARRAY_RANGE_REF)
3370 CHECK_OP (1, "Invalid array index.");
3371 if (TREE_OPERAND (t, 2))
3372 CHECK_OP (2, "Invalid array lower bound.");
3373 if (TREE_OPERAND (t, 3))
3374 CHECK_OP (3, "Invalid array stride.");
3376 else if (TREE_CODE (t) == BIT_FIELD_REF)
3378 CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
3379 CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
3382 t = TREE_OPERAND (t, 0);
3385 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3387 error ("Invalid reference prefix.");
3388 return t;
3390 *walk_subtrees = 0;
3391 break;
3393 case LT_EXPR:
3394 case LE_EXPR:
3395 case GT_EXPR:
3396 case GE_EXPR:
3397 case EQ_EXPR:
3398 case NE_EXPR:
3399 case UNORDERED_EXPR:
3400 case ORDERED_EXPR:
3401 case UNLT_EXPR:
3402 case UNLE_EXPR:
3403 case UNGT_EXPR:
3404 case UNGE_EXPR:
3405 case UNEQ_EXPR:
3406 case LTGT_EXPR:
3407 case PLUS_EXPR:
3408 case MINUS_EXPR:
3409 case MULT_EXPR:
3410 case TRUNC_DIV_EXPR:
3411 case CEIL_DIV_EXPR:
3412 case FLOOR_DIV_EXPR:
3413 case ROUND_DIV_EXPR:
3414 case TRUNC_MOD_EXPR:
3415 case CEIL_MOD_EXPR:
3416 case FLOOR_MOD_EXPR:
3417 case ROUND_MOD_EXPR:
3418 case RDIV_EXPR:
3419 case EXACT_DIV_EXPR:
3420 case MIN_EXPR:
3421 case MAX_EXPR:
3422 case LSHIFT_EXPR:
3423 case RSHIFT_EXPR:
3424 case LROTATE_EXPR:
3425 case RROTATE_EXPR:
3426 case BIT_IOR_EXPR:
3427 case BIT_XOR_EXPR:
3428 case BIT_AND_EXPR:
3429 CHECK_OP (0, "Invalid operand to binary operator");
3430 CHECK_OP (1, "Invalid operand to binary operator");
3431 break;
3433 default:
3434 break;
3436 return NULL;
3438 #undef CHECK_OP
3442 /* Verify STMT, return true if STMT is not in GIMPLE form.
3443 TODO: Implement type checking. */
3445 static bool
3446 verify_stmt (tree stmt, bool last_in_block)
3448 tree addr;
3450 if (!is_gimple_stmt (stmt))
3452 error ("Is not a valid GIMPLE statement.");
3453 goto fail;
3456 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3457 if (addr)
3459 debug_generic_stmt (addr);
3460 return true;
3463 /* If the statement is marked as part of an EH region, then it is
3464 expected that the statement could throw. Verify that when we
3465 have optimizations that simplify statements such that we prove
3466 that they cannot throw, that we update other data structures
3467 to match. */
3468 if (lookup_stmt_eh_region (stmt) >= 0)
3470 if (!tree_could_throw_p (stmt))
3472 error ("Statement marked for throw, but doesn%'t.");
3473 goto fail;
3475 if (!last_in_block && tree_can_throw_internal (stmt))
3477 error ("Statement marked for throw in middle of block.");
3478 goto fail;
3482 return false;
3484 fail:
3485 debug_generic_stmt (stmt);
3486 return true;
3490 /* Return true when the tree T can be shared.  */
3492 static bool
3493 tree_node_can_be_shared (tree t)
3495 if (IS_TYPE_OR_DECL_P (t)
3496 /* We check for constants explicitly since they are not considered
3497 gimple invariants if they overflowed. */
3498 || CONSTANT_CLASS_P (t)
3499 || is_gimple_min_invariant (t)
3500 || TREE_CODE (t) == SSA_NAME
3501 || t == error_mark_node)
3502 return true;
3504 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3505 return true;
3507 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3508 /* We check for constants explicitly since they are not considered
3509 gimple invariants if they overflowed. */
3510 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3511 || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3512 || (TREE_CODE (t) == COMPONENT_REF
3513 || TREE_CODE (t) == REALPART_EXPR
3514 || TREE_CODE (t) == IMAGPART_EXPR))
3515 t = TREE_OPERAND (t, 0);
3517 if (DECL_P (t))
3518 return true;
3520 return false;
3524 /* Called via walk_tree.  Verify tree sharing.  */
3526 static tree
3527 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3529 htab_t htab = (htab_t) data;
3530 void **slot;
3532 if (tree_node_can_be_shared (*tp))
3534 *walk_subtrees = false;
3535 return NULL;
3538 slot = htab_find_slot (htab, *tp, INSERT);
3539 if (*slot)
3540 return *slot;
3541 *slot = *tp;
3543 return NULL;
3547 /* Verify the GIMPLE statement chain. */
3549 void
3550 verify_stmts (void)
3552 basic_block bb;
3553 block_stmt_iterator bsi;
3554 bool err = false;
3555 htab_t htab;
3556 tree addr;
3558 timevar_push (TV_TREE_STMT_VERIFY);
3559 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3561 FOR_EACH_BB (bb)
3563 tree phi;
3564 int i;
3566 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3568 int phi_num_args = PHI_NUM_ARGS (phi);
3570 for (i = 0; i < phi_num_args; i++)
3572 tree t = PHI_ARG_DEF (phi, i);
3573 tree addr;
3575 /* Addressable variables do have SSA_NAMEs but they
3576 are not considered gimple values. */
3577 if (TREE_CODE (t) != SSA_NAME
3578 && TREE_CODE (t) != FUNCTION_DECL
3579 && !is_gimple_val (t))
3581 error ("PHI def is not a GIMPLE value");
3582 debug_generic_stmt (phi);
3583 debug_generic_stmt (t);
3584 err |= true;
3587 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3588 if (addr)
3590 debug_generic_stmt (addr);
3591 err |= true;
3594 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3595 if (addr)
3597 error ("Incorrect sharing of tree nodes");
3598 debug_generic_stmt (phi);
3599 debug_generic_stmt (addr);
3600 err |= true;
3605 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3607 tree stmt = bsi_stmt (bsi);
3608 bsi_next (&bsi);
3609 err |= verify_stmt (stmt, bsi_end_p (bsi));
3610 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3611 if (addr)
3613 error ("Incorrect sharing of tree nodes");
3614 debug_generic_stmt (stmt);
3615 debug_generic_stmt (addr);
3616 err |= true;
3621 if (err)
3622 internal_error ("verify_stmts failed.");
3624 htab_delete (htab);
3625 timevar_pop (TV_TREE_STMT_VERIFY);
3629 /* Verifies that the flow information is OK. */
3631 static int
3632 tree_verify_flow_info (void)
3634 int err = 0;
3635 basic_block bb;
3636 block_stmt_iterator bsi;
3637 tree stmt;
3638 edge e;
3639 edge_iterator ei;
3641 if (ENTRY_BLOCK_PTR->stmt_list)
3643 error ("ENTRY_BLOCK has a statement list associated with it\n");
3644 err = 1;
3647 if (EXIT_BLOCK_PTR->stmt_list)
3649 error ("EXIT_BLOCK has a statement list associated with it\n");
3650 err = 1;
3653 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3654 if (e->flags & EDGE_FALLTHRU)
3656 error ("Fallthru to exit from bb %d\n", e->src->index);
3657 err = 1;
3660 FOR_EACH_BB (bb)
3662 bool found_ctrl_stmt = false;
3664 stmt = NULL_TREE;
3666 /* Skip labels on the start of basic block. */
3667 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3669 tree prev_stmt = stmt;
3671 stmt = bsi_stmt (bsi);
3673 if (TREE_CODE (stmt) != LABEL_EXPR)
3674 break;
3676 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3678 error ("Nonlocal label %s is not first "
3679 "in a sequence of labels in bb %d",
3680 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3681 bb->index);
3682 err = 1;
3685 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3687 error ("Label %s to block does not match in bb %d\n",
3688 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3689 bb->index);
3690 err = 1;
3693 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3694 != current_function_decl)
3696 error ("Label %s has incorrect context in bb %d\n",
3697 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3698 bb->index);
3699 err = 1;
3703 /* Verify that body of basic block BB is free of control flow. */
3704 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3706 tree stmt = bsi_stmt (bsi);
3708 if (found_ctrl_stmt)
3710 error ("Control flow in the middle of basic block %d\n",
3711 bb->index);
3712 err = 1;
3715 if (stmt_ends_bb_p (stmt))
3716 found_ctrl_stmt = true;
3718 if (TREE_CODE (stmt) == LABEL_EXPR)
3720 error ("Label %s in the middle of basic block %d\n",
3721 		 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3722 bb->index);
3723 err = 1;
3726 bsi = bsi_last (bb);
3727 if (bsi_end_p (bsi))
3728 continue;
3730 stmt = bsi_stmt (bsi);
3732 if (is_ctrl_stmt (stmt))
3734 FOR_EACH_EDGE (e, ei, bb->succs)
3735 if (e->flags & EDGE_FALLTHRU)
3737 error ("Fallthru edge after a control statement in bb %d \n",
3738 bb->index);
3739 err = 1;
3743 switch (TREE_CODE (stmt))
3745 case COND_EXPR:
3747 edge true_edge;
3748 edge false_edge;
3749 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3750 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3752 error ("Structured COND_EXPR at the end of bb %d\n", bb->index);
3753 err = 1;
3756 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3758 if (!true_edge || !false_edge
3759 || !(true_edge->flags & EDGE_TRUE_VALUE)
3760 || !(false_edge->flags & EDGE_FALSE_VALUE)
3761 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3762 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3763 || EDGE_COUNT (bb->succs) >= 3)
3765 error ("Wrong outgoing edge flags at end of bb %d\n",
3766 bb->index);
3767 err = 1;
3770 if (!has_label_p (true_edge->dest,
3771 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3773 error ("%<then%> label does not match edge at end of bb %d\n",
3774 bb->index);
3775 err = 1;
3778 if (!has_label_p (false_edge->dest,
3779 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3781 error ("%<else%> label does not match edge at end of bb %d\n",
3782 bb->index);
3783 err = 1;
3786 break;
3788 case GOTO_EXPR:
3789 if (simple_goto_p (stmt))
3791 error ("Explicit goto at end of bb %d\n", bb->index);
3792 err = 1;
3794 else
3796 /* FIXME. We should double check that the labels in the
3797 destination blocks have their address taken. */
3798 FOR_EACH_EDGE (e, ei, bb->succs)
3799 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3800 | EDGE_FALSE_VALUE))
3801 || !(e->flags & EDGE_ABNORMAL))
3803 error ("Wrong outgoing edge flags at end of bb %d\n",
3804 bb->index);
3805 err = 1;
3808 break;
3810 case RETURN_EXPR:
3811 if (EDGE_COUNT (bb->succs) != 1
3812 || (EDGE_SUCC (bb, 0)->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3813 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3815 error ("Wrong outgoing edge flags at end of bb %d\n", bb->index);
3816 err = 1;
3818 if (EDGE_SUCC (bb, 0)->dest != EXIT_BLOCK_PTR)
3820 error ("Return edge does not point to exit in bb %d\n",
3821 bb->index);
3822 err = 1;
3824 break;
3826 case SWITCH_EXPR:
3828 tree prev;
3829 edge e;
3830 size_t i, n;
3831 tree vec;
3833 vec = SWITCH_LABELS (stmt);
3834 n = TREE_VEC_LENGTH (vec);
3836 /* Mark all the destination basic blocks. */
3837 for (i = 0; i < n; ++i)
3839 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3840 basic_block label_bb = label_to_block (lab);
3842 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3843 label_bb->aux = (void *)1;
3846 /* Verify that the case labels are sorted. */
3847 prev = TREE_VEC_ELT (vec, 0);
3848 for (i = 1; i < n - 1; ++i)
3850 tree c = TREE_VEC_ELT (vec, i);
3851 if (! CASE_LOW (c))
3853 error ("Found default case not at end of case vector");
3854 err = 1;
3855 continue;
3857 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3859 error ("Case labels not sorted:\n ");
3860 print_generic_expr (stderr, prev, 0);
3861 fprintf (stderr," is greater than ");
3862 print_generic_expr (stderr, c, 0);
3863 fprintf (stderr," but comes before it.\n");
3864 err = 1;
3866 prev = c;
3868 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3870 error ("No default case found at end of case vector");
3871 err = 1;
3874 FOR_EACH_EDGE (e, ei, bb->succs)
3876 if (!e->dest->aux)
3878 error ("Extra outgoing edge %d->%d\n",
3879 bb->index, e->dest->index);
3880 err = 1;
3882 e->dest->aux = (void *)2;
3883 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3884 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3886 error ("Wrong outgoing edge flags at end of bb %d\n",
3887 bb->index);
3888 err = 1;
3892 /* Check that we have all of them. */
3893 for (i = 0; i < n; ++i)
3895 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3896 basic_block label_bb = label_to_block (lab);
3898 if (label_bb->aux != (void *)2)
3900 error ("Missing edge %i->%i",
3901 bb->index, label_bb->index);
3902 err = 1;
3906 FOR_EACH_EDGE (e, ei, bb->succs)
3907 e->dest->aux = (void *)0;
3910 default: ;
3914 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3915 verify_dominators (CDI_DOMINATORS);
3917 return err;
3921 /* Updates phi nodes after creating a forwarder block joined
3922 by edge FALLTHRU. */
3924 static void
3925 tree_make_forwarder_block (edge fallthru)
3927 edge e;
3928 edge_iterator ei;
3929 basic_block dummy, bb;
3930 tree phi, new_phi, var;
3932 dummy = fallthru->src;
3933 bb = fallthru->dest;
3935 if (EDGE_COUNT (bb->preds) == 1)
3936 return;
3938 /* If we redirected a branch we must create new phi nodes at the
3939 start of BB. */
3940 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3942 var = PHI_RESULT (phi);
3943 new_phi = create_phi_node (var, bb);
3944 SSA_NAME_DEF_STMT (var) = new_phi;
3945 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3946 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3949 /* Ensure that the PHI node chain is in the same order. */
3950 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3952 /* Add the arguments we have stored on edges. */
3953 FOR_EACH_EDGE (e, ei, bb->preds)
3955 if (e == fallthru)
3956 continue;
3958 flush_pending_stmts (e);
3963 /* Return true if basic block BB does nothing except pass control
3964    flow to another block and we can safely insert a label at
3965 the start of the successor block.
3967 As a precondition, we require that BB be not equal to
3968 ENTRY_BLOCK_PTR. */
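/* In other words, a forwarder block contains nothing but labels (none of
   them nonlocal), plus PHI nodes when PHI_WANTED, and has a single normal
   outgoing edge to some block other than itself or the exit block.  */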
3970 static bool
3971 tree_forwarder_block_p (basic_block bb, bool phi_wanted)
3973 block_stmt_iterator bsi;
3975 /* BB must have a single outgoing edge. */
3976 if (EDGE_COUNT (bb->succs) != 1
3977 /* If PHI_WANTED is false, BB must not have any PHI nodes.
3978 Otherwise, BB must have PHI nodes. */
3979 || (phi_nodes (bb) != NULL_TREE) != phi_wanted
3980 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
3981 || EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR
3982 /* Nor should this be an infinite loop. */
3983 || EDGE_SUCC (bb, 0)->dest == bb
3984 /* BB may not have an abnormal outgoing edge. */
3985 || (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
3986 return false;
3988 #if ENABLE_CHECKING
3989 gcc_assert (bb != ENTRY_BLOCK_PTR);
3990 #endif
3992 /* Now walk through the statements backward. We can ignore labels,
3993 anything else means this is not a forwarder block. */
3994 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3996 tree stmt = bsi_stmt (bsi);
3998 switch (TREE_CODE (stmt))
4000 case LABEL_EXPR:
4001 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
4002 return false;
4003 break;
4005 default:
4006 return false;
4010 if (find_edge (ENTRY_BLOCK_PTR, bb))
4011 return false;
4013 return true;
4016 /* Return true if BB has at least one abnormal incoming edge. */
4018 static inline bool
4019 has_abnormal_incoming_edge_p (basic_block bb)
4021 edge e;
4022 edge_iterator ei;
4024 FOR_EACH_EDGE (e, ei, bb->preds)
4025 if (e->flags & EDGE_ABNORMAL)
4026 return true;
4028 return false;
4031 /* Removes forwarder block BB. Returns false if this failed. If a new
4032 forwarder block is created due to redirection of edges, it is
4033   added to WORKLIST.  */
4035 static bool
4036 remove_forwarder_block (basic_block bb, basic_block **worklist)
4038 edge succ = EDGE_SUCC (bb, 0), e, s;
4039 basic_block dest = succ->dest;
4040 tree label;
4041 tree phi;
4042 edge_iterator ei;
4043 block_stmt_iterator bsi, bsi_to;
4044 bool seen_abnormal_edge = false;
4046 /* We check for infinite loops already in tree_forwarder_block_p.
4047 However it may happen that the infinite loop is created
4048 afterwards due to removal of forwarders. */
4049 if (dest == bb)
4050 return false;
4052 /* If the destination block consists of a nonlocal label, do not merge
4053 it. */
4054 label = first_stmt (dest);
4055 if (label
4056 && TREE_CODE (label) == LABEL_EXPR
4057 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
4058 return false;
4060 /* If there is an abnormal edge to basic block BB, but not into
4061     DEST, problems might occur when PHI nodes are eliminated during the
4062     out-of-SSA translation, due to overlapping live ranges of registers.
4064 If there is an abnormal edge in DEST, the problems would occur
4065 anyway since cleanup_dead_labels would then merge the labels for
4066     two different EH regions, and the rest of the exception handling code
4067 does not like it.
4069 So if there is an abnormal edge to BB, proceed only if there is
4070 no abnormal edge to DEST and there are no phi nodes in DEST. */
4071 if (has_abnormal_incoming_edge_p (bb))
4073 seen_abnormal_edge = true;
4075 if (has_abnormal_incoming_edge_p (dest)
4076 || phi_nodes (dest) != NULL_TREE)
4077 return false;
4080 /* If there are phi nodes in DEST, and some of the blocks that are
4081 predecessors of BB are also predecessors of DEST, check that the
4082 phi node arguments match. */
4083 if (phi_nodes (dest))
4085 FOR_EACH_EDGE (e, ei, bb->preds)
4087 s = find_edge (e->src, dest);
4088 if (!s)
4089 continue;
4091 if (!phi_alternatives_equal (dest, succ, s))
4092 return false;
4096 /* Redirect the edges. */
4097 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4099 if (e->flags & EDGE_ABNORMAL)
4101 /* If there is an abnormal edge, redirect it anyway, and
4102 move the labels to the new block to make it legal. */
4103 s = redirect_edge_succ_nodup (e, dest);
4105 else
4106 s = redirect_edge_and_branch (e, dest);
4108 if (s == e)
4110 /* Create arguments for the phi nodes, since the edge was not
4111 here before. */
4112 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4113 add_phi_arg (phi, PHI_ARG_DEF (phi, succ->dest_idx), s);
4115 else
4117 /* The source basic block might become a forwarder. We know
4118 that it was not a forwarder before, since it used to have
4119 at least two outgoing edges, so we may just add it to
4120 worklist. */
4121 if (tree_forwarder_block_p (s->src, false))
4122 *(*worklist)++ = s->src;
4126 if (seen_abnormal_edge)
4128 /* Move the labels to the new block, so that the redirection of
4129 the abnormal edges works. */
4131 bsi_to = bsi_start (dest);
4132 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
4134 label = bsi_stmt (bsi);
4135 gcc_assert (TREE_CODE (label) == LABEL_EXPR);
4136 bsi_remove (&bsi);
4137 bsi_insert_before (&bsi_to, label, BSI_CONTINUE_LINKING);
4141 /* Update the dominators. */
4142 if (dom_info_available_p (CDI_DOMINATORS))
4144 basic_block dom, dombb, domdest;
4146 dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
4147 domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
4148 if (domdest == bb)
4150 /* Shortcut to avoid calling (relatively expensive)
4151 nearest_common_dominator unless necessary. */
4152 dom = dombb;
4154 else
4155 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
4157 set_immediate_dominator (CDI_DOMINATORS, dest, dom);
4160 /* And kill the forwarder block. */
4161 delete_basic_block (bb);
4163 return true;
4166 /* Removes forwarder blocks. */
4168 static bool
4169 cleanup_forwarder_blocks (void)
4171 basic_block bb;
4172 bool changed = false;
4173 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
4174 basic_block *current = worklist;
4176 FOR_EACH_BB (bb)
4178 if (tree_forwarder_block_p (bb, false))
4179 *current++ = bb;
4182 while (current != worklist)
4184 bb = *--current;
4185 changed |= remove_forwarder_block (bb, &current);
4188 free (worklist);
4189 return changed;
4192 /* Merge the PHI nodes at BB into those at BB's sole successor. */
4194 static void
4195 remove_forwarder_block_with_phi (basic_block bb)
4197 edge succ = EDGE_SUCC (bb, 0);
4198 basic_block dest = succ->dest;
4199 tree label;
4200 basic_block dombb, domdest, dom;
4202 /* We check for infinite loops already in tree_forwarder_block_p.
4203 However it may happen that the infinite loop is created
4204 afterwards due to removal of forwarders. */
4205 if (dest == bb)
4206 return;
4208 /* If the destination block consists of a nonlocal label, do not
4209 merge it. */
4210 label = first_stmt (dest);
4211 if (label
4212 && TREE_CODE (label) == LABEL_EXPR
4213 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
4214 return;
4216 /* Redirect each incoming edge to BB to DEST. */
4217 while (EDGE_COUNT (bb->preds) > 0)
4219 edge e = EDGE_PRED (bb, 0), s;
4220 tree phi;
4222 s = find_edge (e->src, dest);
4223 if (s)
4225 /* We already have an edge S from E->src to DEST. If S and
4226 E->dest's sole successor edge have the same PHI arguments
4227 at DEST, redirect S to DEST. */
4228 if (phi_alternatives_equal (dest, s, succ))
4230 e = redirect_edge_and_branch (e, dest);
4231 PENDING_STMT (e) = NULL_TREE;
4232 continue;
4235 /* PHI arguments are different. Create a forwarder block by
4236 splitting E so that we can merge PHI arguments on E to
4237 DEST. */
4238 e = EDGE_SUCC (split_edge (e), 0);
4241 s = redirect_edge_and_branch (e, dest);
4243 /* redirect_edge_and_branch must not create a new edge. */
4244 gcc_assert (s == e);
4246 /* Add to the PHI nodes at DEST each PHI argument removed at the
4247 destination of E. */
4248 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4250 tree def = PHI_ARG_DEF (phi, succ->dest_idx);
4252 if (TREE_CODE (def) == SSA_NAME)
4254 tree var;
4256 /* If DEF is one of the results of PHI nodes removed during
4257 redirection, replace it with the PHI argument that used
4258 to be on E. */
4259 for (var = PENDING_STMT (e); var; var = TREE_CHAIN (var))
4261 tree old_arg = TREE_PURPOSE (var);
4262 tree new_arg = TREE_VALUE (var);
4264 if (def == old_arg)
4266 def = new_arg;
4267 break;
4272 add_phi_arg (phi, def, s);
4275 PENDING_STMT (e) = NULL;
4278 /* Update the dominators. */
4279 dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
4280 domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
4281 if (domdest == bb)
4283 /* Shortcut to avoid calling (relatively expensive)
4284 nearest_common_dominator unless necessary. */
4285 dom = dombb;
4287 else
4288 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
4290 set_immediate_dominator (CDI_DOMINATORS, dest, dom);
4292 /* Remove BB since all of BB's incoming edges have been redirected
4293 to DEST. */
4294 delete_basic_block (bb);
4297 /* This pass merges PHI nodes if one feeds into another. For example,
4298 suppose we have the following:
4300 goto <bb 9> (<L9>);
4302 <L8>:;
4303 tem_17 = foo ();
4305 # tem_6 = PHI <tem_17(8), tem_23(7)>;
4306 <L9>:;
4308 # tem_3 = PHI <tem_6(9), tem_2(5)>;
4309 <L10>:;
4311 Then we merge the first PHI node into the second one like so:
4313 goto <bb 9> (<L10>);
4315 <L8>:;
4316 tem_17 = foo ();
4318 # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
4319 <L10>:;
4322 static void
4323 merge_phi_nodes (void)
4325 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
4326 basic_block *current = worklist;
4327 basic_block bb;
4329 calculate_dominance_info (CDI_DOMINATORS);
4331 /* Find all PHI nodes that we may be able to merge. */
4332 FOR_EACH_BB (bb)
4334 basic_block dest;
4336 /* Look for a forwarder block with PHI nodes. */
4337 if (!tree_forwarder_block_p (bb, true))
4338 continue;
4340 dest = EDGE_SUCC (bb, 0)->dest;
4342 /* We have to feed into another basic block with PHI
4343 nodes. */
4344 if (!phi_nodes (dest)
4345 /* We don't want to deal with a basic block with
4346 abnormal edges. */
4347 || has_abnormal_incoming_edge_p (bb))
4348 continue;
4350 if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
4352 /* If BB does not dominate DEST, then the PHI nodes at
4353 DEST must be the only users of the results of the PHI
4354 nodes at BB. */
4355 *current++ = bb;
4359 /* Now let's drain WORKLIST. */
4360 while (current != worklist)
4362 bb = *--current;
4363 remove_forwarder_block_with_phi (bb);
4366 free (worklist);
4369 static bool
4370 gate_merge_phi (void)
4372 return 1;
4375 struct tree_opt_pass pass_merge_phi = {
4376 "mergephi", /* name */
4377 gate_merge_phi, /* gate */
4378 merge_phi_nodes, /* execute */
4379 NULL, /* sub */
4380 NULL, /* next */
4381 0, /* static_pass_number */
4382 TV_TREE_MERGE_PHI, /* tv_id */
4383 PROP_cfg | PROP_ssa, /* properties_required */
4384 0, /* properties_provided */
4385 0, /* properties_destroyed */
4386 0, /* todo_flags_start */
4387 TODO_dump_func | TODO_ggc_collect /* todo_flags_finish */
4388 | TODO_verify_ssa,
4389 0 /* letter */
4392 /* Return a non-special label at the head of basic block BB.
4393 Create one if it doesn't exist. */
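/* If a suitable label already exists but is not the first statement of
   the block, it is moved to the head so that callers may rely on its
   position.  */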
4395 tree
4396 tree_block_label (basic_block bb)
4398 block_stmt_iterator i, s = bsi_start (bb);
4399 bool first = true;
4400 tree label, stmt;
4402 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4404 stmt = bsi_stmt (i);
4405 if (TREE_CODE (stmt) != LABEL_EXPR)
4406 break;
4407 label = LABEL_EXPR_LABEL (stmt);
4408 if (!DECL_NONLOCAL (label))
4410 if (!first)
4411 bsi_move_before (&i, &s);
4412 return label;
4416 label = create_artificial_label ();
4417 stmt = build1 (LABEL_EXPR, void_type_node, label);
4418 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4419 return label;
4423 /* Attempt to perform edge redirection by replacing a possibly complex
4424 jump instruction by a goto or by removing the jump completely.
4425 This can apply only if all edges now point to the same block. The
4426 parameters and return values are equivalent to
4427 redirect_edge_and_branch. */
4429 static edge
4430 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4432 basic_block src = e->src;
4433 block_stmt_iterator b;
4434 tree stmt;
4436 /* We can replace or remove a complex jump only when we have exactly
4437 two edges. */
4438 if (EDGE_COUNT (src->succs) != 2
4439 /* Verify that all targets will be TARGET.  Specifically, the edge that is
4440 not E must also go to TARGET; EDGE_SUCC (src, 0) == e evaluates to 0 or 1 and thus selects the other outgoing edge of SRC. */
4441 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4442 return NULL;
4444 b = bsi_last (src);
4445 if (bsi_end_p (b))
4446 return NULL;
4447 stmt = bsi_stmt (b);
4449 if (TREE_CODE (stmt) == COND_EXPR
4450 || TREE_CODE (stmt) == SWITCH_EXPR)
4452 bsi_remove (&b);
4453 e = ssa_redirect_edge (e, target);
4454 e->flags = EDGE_FALLTHRU;
4455 return e;
4458 return NULL;
4462 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4463 edge representing the redirected branch. */
4465 static edge
4466 tree_redirect_edge_and_branch (edge e, basic_block dest)
4468 basic_block bb = e->src;
4469 block_stmt_iterator bsi;
4470 edge ret;
4471 tree label, stmt;
4473 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4474 return NULL;
4476 if (e->src != ENTRY_BLOCK_PTR
4477 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4478 return ret;
4480 if (e->dest == dest)
4481 return NULL;
4483 label = tree_block_label (dest);
4485 bsi = bsi_last (bb);
4486 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
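  /* How the redirection is carried out depends on the last statement of
     the source block: COND_EXPR arms and SWITCH_EXPR case labels are
     retargeted to LABEL, a RETURN_EXPR is removed and the edge becomes a
     fallthru, and an existing fallthru edge needs no statement change.  */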
4488 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4490 case COND_EXPR:
4491 stmt = (e->flags & EDGE_TRUE_VALUE
4492 ? COND_EXPR_THEN (stmt)
4493 : COND_EXPR_ELSE (stmt));
4494 GOTO_DESTINATION (stmt) = label;
4495 break;
4497 case GOTO_EXPR:
4498 /* No non-abnormal edges should lead from a non-simple goto, and
4499 simple ones should be represented implicitly. */
4500 gcc_unreachable ();
4502 case SWITCH_EXPR:
4504 tree cases = get_cases_for_edge (e, stmt);
4506 /* If we have a list of cases associated with E, then use it
4507 as it's a lot faster than walking the entire case vector. */
4508 if (cases)
4510 edge e2 = find_edge (e->src, dest);
4511 tree last, first;
4513 first = cases;
4514 while (cases)
4516 last = cases;
4517 CASE_LABEL (cases) = label;
4518 cases = TREE_CHAIN (cases);
4521 /* If there was already an edge in the CFG, then we need to move all the
4522 cases associated with E to E2; the splice below links E's whole case list into E2's list right after its head. */
4523 if (e2)
4525 tree cases2 = get_cases_for_edge (e2, stmt);
4527 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4528 TREE_CHAIN (cases2) = first;
4531 else
4533 tree vec = SWITCH_LABELS (stmt);
4534 size_t i, n = TREE_VEC_LENGTH (vec);
4536 for (i = 0; i < n; i++)
4538 tree elt = TREE_VEC_ELT (vec, i);
4540 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4541 CASE_LABEL (elt) = label;
4545 break;
4548 case RETURN_EXPR:
4549 bsi_remove (&bsi);
4550 e->flags |= EDGE_FALLTHRU;
4551 break;
4553 default:
4554 /* Otherwise it must be a fallthru edge, and we don't need to
4555 do anything besides redirecting it. */
4556 gcc_assert (e->flags & EDGE_FALLTHRU);
4557 break;
4560 /* Update/insert PHI nodes as necessary. */
4562 /* Now update the edges in the CFG. */
4563 e = ssa_redirect_edge (e, dest);
4565 return e;
4569 /* Simple wrapper, as we can always redirect fallthru edges. */
4571 static basic_block
4572 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4574 e = tree_redirect_edge_and_branch (e, dest);
4575 gcc_assert (e);
4577 return NULL;
4581 /* Splits basic block BB after statement STMT (but at least after the
4582 labels). If STMT is NULL, BB is split just after the labels. */
4584 static basic_block
4585 tree_split_block (basic_block bb, void *stmt)
4587 block_stmt_iterator bsi, bsi_tgt;
4588 tree act;
4589 basic_block new_bb;
4590 edge e;
4591 edge_iterator ei;
4593 new_bb = create_empty_bb (bb);
4595 /* Redirect the outgoing edges. */
4596 new_bb->succs = bb->succs;
4597 bb->succs = NULL;
4598 FOR_EACH_EDGE (e, ei, new_bb->succs)
4599 e->src = new_bb;
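  /* Splitting right after a LABEL_EXPR is the same as splitting right
     after the labels, so treat such a STMT as if none was given.  */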
4601 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4602 stmt = NULL;
4604 /* Move everything from BSI to the new basic block. */
4605 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4607 act = bsi_stmt (bsi);
4608 if (TREE_CODE (act) == LABEL_EXPR)
4609 continue;
4611 if (!stmt)
4612 break;
4614 if (stmt == act)
4616 bsi_next (&bsi);
4617 break;
4621 bsi_tgt = bsi_start (new_bb);
4622 while (!bsi_end_p (bsi))
4624 act = bsi_stmt (bsi);
4625 bsi_remove (&bsi);
4626 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4629 return new_bb;
4633 /* Moves basic block BB after block AFTER. */
4635 static bool
4636 tree_move_block_after (basic_block bb, basic_block after)
4638 if (bb->prev_bb == after)
4639 return true;
4641 unlink_block (bb);
4642 link_block (bb, after);
4644 return true;
4650 /* Return true if basic block BB can be duplicated. */
4650 static bool
4651 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4653 return true;
4656 /* Create a duplicate of the basic block BB. NOTE: This does not
4657 preserve SSA form. */
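/* Note: the definitions made by the copied statements are marked for rewrite
   below; the caller (for instance tree_duplicate_sese_region, via copy_bbs)
   is expected to allocate new SSA names for them afterwards.  */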
4659 static basic_block
4660 tree_duplicate_bb (basic_block bb)
4662 basic_block new_bb;
4663 block_stmt_iterator bsi, bsi_tgt;
4664 tree phi, val;
4665 ssa_op_iter op_iter;
4667 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4669 /* First copy the phi nodes. We do not copy phi node arguments here,
4670 since the edges are not ready yet. Keep the chain of phi nodes in
4671 the same order, so that their arguments can be added to them later. */
4672 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4674 mark_for_rewrite (PHI_RESULT (phi));
4675 create_phi_node (PHI_RESULT (phi), new_bb);
4677 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4679 bsi_tgt = bsi_start (new_bb);
4680 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4682 tree stmt = bsi_stmt (bsi);
4683 tree copy;
4685 if (TREE_CODE (stmt) == LABEL_EXPR)
4686 continue;
4688 /* Record the definitions. */
4689 get_stmt_operands (stmt);
4691 FOR_EACH_SSA_TREE_OPERAND (val, stmt, op_iter, SSA_OP_ALL_DEFS)
4692 mark_for_rewrite (val);
4694 copy = unshare_expr (stmt);
4696 /* Also copy the virtual operands. */
4697 get_stmt_ann (copy);
4698 copy_virtual_operands (copy, stmt);
4700 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4703 return new_bb;
4706 /* Basic block BB_COPY was created by code duplication. Add phi node
4707 arguments for edges going out of BB_COPY. The blocks that were
4708 duplicated have rbi->duplicated set to one. */
4710 void
4711 add_phi_args_after_copy_bb (basic_block bb_copy)
4713 basic_block bb, dest;
4714 edge e, e_copy;
4715 edge_iterator ei;
4716 tree phi, phi_copy, phi_next, def;
4718 bb = bb_copy->rbi->original;
4720 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4722 if (!phi_nodes (e_copy->dest))
4723 continue;
4725 if (e_copy->dest->rbi->duplicated)
4726 dest = e_copy->dest->rbi->original;
4727 else
4728 dest = e_copy->dest;
4730 e = find_edge (bb, dest);
4731 if (!e)
4733 /* During loop unrolling the target of the latch edge is copied.
4734 In this case we are not looking for the edge to DEST, but for
4735 the edge to the duplicated block whose original was DEST. */
4736 FOR_EACH_EDGE (e, ei, bb->succs)
4737 if (e->dest->rbi->duplicated
4738 && e->dest->rbi->original == dest)
4739 break;
4741 gcc_assert (e != NULL);
4744 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4745 phi;
4746 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4748 phi_next = PHI_CHAIN (phi);
4750 gcc_assert (PHI_RESULT (phi) == PHI_RESULT (phi_copy));
4751 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4752 add_phi_arg (phi_copy, def, e_copy);
4757 /* Blocks in REGION_COPY array of length N_REGION were created by
4758 duplication of basic blocks. Add phi node arguments for edges
4759 going from these blocks. */
4761 void
4762 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4764 unsigned i;
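  /* Set the duplicated flag on every copy up front, so that
     add_phi_args_after_copy_bb can tell, for any successor edge, whether
     its destination is itself a copy and map it back to the original.  */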
4766 for (i = 0; i < n_region; i++)
4767 region_copy[i]->rbi->duplicated = 1;
4769 for (i = 0; i < n_region; i++)
4770 add_phi_args_after_copy_bb (region_copy[i]);
4772 for (i = 0; i < n_region; i++)
4773 region_copy[i]->rbi->duplicated = 0;
4776 /* Maps the old ssa name FROM_NAME to TO_NAME. */
4778 struct ssa_name_map_entry
4780 tree from_name;
4781 tree to_name;
4784 /* Hash function for ssa_name_map_entry. */
4786 static hashval_t
4787 ssa_name_map_entry_hash (const void *entry)
4789 const struct ssa_name_map_entry *en = entry;
4790 return SSA_NAME_VERSION (en->from_name);
4793 /* Equality function for ssa_name_map_entry. */
4795 static int
4796 ssa_name_map_entry_eq (const void *in_table, const void *ssa_name)
4798 const struct ssa_name_map_entry *en = in_table;
4800 return en->from_name == ssa_name;
4803 /* Allocate duplicates of the ssa names whose versions are set in the
4804 bitmap DEFINITIONS, and store the mapping in MAP. */
4806 void
4807 allocate_ssa_names (bitmap definitions, htab_t *map)
4809 tree name;
4810 struct ssa_name_map_entry *entry;
4811 PTR *slot;
4812 unsigned ver;
4813 bitmap_iterator bi;
4815 if (!*map)
4816 *map = htab_create (10, ssa_name_map_entry_hash,
4817 ssa_name_map_entry_eq, free);
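  /* For every ssa name whose version is set in DEFINITIONS allocate a
     fresh duplicate.  If the name already has an entry (i.e. the map is
     being reused), only its to_name is refreshed.  */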
4818 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
4820 name = ssa_name (ver);
4821 slot = htab_find_slot_with_hash (*map, name, SSA_NAME_VERSION (name),
4822 INSERT);
4823 if (*slot)
4824 entry = *slot;
4825 else
4827 entry = xmalloc (sizeof (struct ssa_name_map_entry));
4828 entry->from_name = name;
4829 *slot = entry;
4831 entry->to_name = duplicate_ssa_name (name, SSA_NAME_DEF_STMT (name));
4835 /* Rewrite the definition DEF in statement STMT to the new ssa name
4836 specified by the mapping MAP. */
4838 static void
4839 rewrite_to_new_ssa_names_def (def_operand_p def, tree stmt, htab_t map)
4841 tree name = DEF_FROM_PTR (def);
4842 struct ssa_name_map_entry *entry;
4844 gcc_assert (TREE_CODE (name) == SSA_NAME);
4846 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4847 if (!entry)
4848 return;
4850 SET_DEF (def, entry->to_name);
4851 SSA_NAME_DEF_STMT (entry->to_name) = stmt;
4854 /* Rewrite USE to the new ssa name specified by the mapping MAP. */
4856 static void
4857 rewrite_to_new_ssa_names_use (use_operand_p use, htab_t map)
4859 tree name = USE_FROM_PTR (use);
4860 struct ssa_name_map_entry *entry;
4862 if (TREE_CODE (name) != SSA_NAME)
4863 return;
4865 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4866 if (!entry)
4867 return;
4869 SET_USE (use, entry->to_name);
4872 /* Rewrite the ssa names in basic block BB to new ones as specified by the
4873 mapping MAP. */
4875 void
4876 rewrite_to_new_ssa_names_bb (basic_block bb, htab_t map)
4878 unsigned i;
4879 edge e;
4880 edge_iterator ei;
4881 tree phi, stmt;
4882 block_stmt_iterator bsi;
4883 use_optype uses;
4884 vuse_optype vuses;
4885 def_optype defs;
4886 v_may_def_optype v_may_defs;
4887 v_must_def_optype v_must_defs;
4888 stmt_ann_t ann;
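  /* Look for an abnormal predecessor edge; the scan below leaves E
     non-NULL exactly when one exists, in which case every PHI result in
     this block must be marked as occurring in an abnormal PHI.  */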
4890 FOR_EACH_EDGE (e, ei, bb->preds)
4891 if (e->flags & EDGE_ABNORMAL)
4892 break;
4894 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4896 rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi), phi, map);
4897 if (e)
4898 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
4901 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4903 stmt = bsi_stmt (bsi);
4904 get_stmt_operands (stmt);
4905 ann = stmt_ann (stmt);
4907 uses = USE_OPS (ann);
4908 for (i = 0; i < NUM_USES (uses); i++)
4909 rewrite_to_new_ssa_names_use (USE_OP_PTR (uses, i), map);
4911 defs = DEF_OPS (ann);
4912 for (i = 0; i < NUM_DEFS (defs); i++)
4913 rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs, i), stmt, map);
4915 vuses = VUSE_OPS (ann);
4916 for (i = 0; i < NUM_VUSES (vuses); i++)
4917 rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses, i), map);
4919 v_may_defs = V_MAY_DEF_OPS (ann);
4920 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
4922 rewrite_to_new_ssa_names_use
4923 (V_MAY_DEF_OP_PTR (v_may_defs, i), map);
4924 rewrite_to_new_ssa_names_def
4925 (V_MAY_DEF_RESULT_PTR (v_may_defs, i), stmt, map);
4928 v_must_defs = V_MUST_DEF_OPS (ann);
4929 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
4931 rewrite_to_new_ssa_names_def
4932 (V_MUST_DEF_RESULT_PTR (v_must_defs, i), stmt, map);
4933 rewrite_to_new_ssa_names_use
4934 (V_MUST_DEF_KILL_PTR (v_must_defs, i), map);
4938 FOR_EACH_EDGE (e, ei, bb->succs)
4939 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
4941 rewrite_to_new_ssa_names_use
4942 (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), map);
4944 if (e->flags & EDGE_ABNORMAL)
4946 tree op = PHI_ARG_DEF_FROM_EDGE (phi, e);
4947 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op) = 1;
4952 /* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
4953 by the mapping MAP. */
4955 void
4956 rewrite_to_new_ssa_names (basic_block *region, unsigned n_region, htab_t map)
4958 unsigned r;
4960 for (r = 0; r < n_region; r++)
4961 rewrite_to_new_ssa_names_bb (region[r], map);
4964 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4965 important exit edge EXIT. By important we mean that no SSA name defined
4966 inside region is live over the other exit edges of the region. All entry
4967 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4968 to the duplicate of the region. SSA form, dominance and loop information
4969 is updated. The new basic blocks are stored to REGION_COPY in the same
4970 order as they had in REGION, provided that REGION_COPY is not NULL.
4971 The function returns false if it is unable to copy the region,
4972 true otherwise. */
4974 bool
4975 tree_duplicate_sese_region (edge entry, edge exit,
4976 basic_block *region, unsigned n_region,
4977 basic_block *region_copy)
4979 unsigned i, n_doms, ver;
4980 bool free_region_copy = false, copying_header = false;
4981 struct loop *loop = entry->dest->loop_father;
4982 edge exit_copy;
4983 bitmap definitions;
4984 tree phi;
4985 basic_block *doms;
4986 htab_t ssa_name_map = NULL;
4987 edge redirected;
4988 bitmap_iterator bi;
4990 if (!can_copy_bbs_p (region, n_region))
4991 return false;
4993 /* Some sanity checking. Note that we do not check for all possible
4994 misuses of the function: if you ask to copy something weird,
4995 it will work, but the state of the structures probably will not be
4996 correct. */
4998 for (i = 0; i < n_region; i++)
5000 /* We do not handle subloops, i.e. all the blocks must belong to the
5001 same loop. */
5002 if (region[i]->loop_father != loop)
5003 return false;
5005 if (region[i] != entry->dest
5006 && region[i] == loop->header)
5007 return false;
5010 loop->copy = loop;
5012 /* In case the function is used for loop header copying (which is the primary
5013 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5014 if (loop->header == entry->dest)
5016 copying_header = true;
5017 loop->copy = loop->outer;
5019 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5020 return false;
5022 for (i = 0; i < n_region; i++)
5023 if (region[i] != exit->src
5024 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5025 return false;
5028 if (!region_copy)
5030 region_copy = xmalloc (sizeof (basic_block) * n_region);
5031 free_region_copy = true;
5034 gcc_assert (!any_marked_for_rewrite_p ());
5036 /* Record blocks outside the region that are dominated by something
5037 inside. */
5038 doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
5039 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
5041 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
5042 definitions = marked_ssa_names ();
5044 if (copying_header)
5046 loop->header = exit->dest;
5047 loop->latch = exit->src;
5050 /* Redirect the entry and add the phi node arguments. */
5051 redirected = redirect_edge_and_branch (entry, entry->dest->rbi->copy);
5052 gcc_assert (redirected != NULL);
5053 flush_pending_stmts (entry);
5055 /* Concerning the update of dominators: we must recompute the dominators
5056 of the entry block and its copy. Anything outside the region that
5057 was dominated by something inside needs recomputing as well. */
5058 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5059 doms[n_doms++] = entry->dest->rbi->original;
5060 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
5061 free (doms);
5063 /* Add the other phi node arguments. */
5064 add_phi_args_after_copy (region_copy, n_region);
5066 /* Add phi nodes for definitions at exit. TODO -- once we have immediate
5067 uses, it should be possible to emit phi nodes just for definitions that
5068 are used outside region. */
5069 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
5071 tree name = ssa_name (ver);
5073 phi = create_phi_node (name, exit->dest);
5074 add_phi_arg (phi, name, exit);
5075 add_phi_arg (phi, name, exit_copy);
5077 SSA_NAME_DEF_STMT (name) = phi;
5080 /* And create new definitions inside region and its copy. TODO -- once we
5081 have immediate uses, it might be better to leave definitions in region
5082 unchanged, create new ssa names for phi nodes on exit, and rewrite
5083 the uses, to avoid changing the copied region. */
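  /* allocate_ssa_names is called once before each rewrite on purpose: the
     second call refreshes the to_name of every entry in the map, so the
     original region and its copy end up with distinct new ssa names for
     the marked definitions.  */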
5084 allocate_ssa_names (definitions, &ssa_name_map);
5085 rewrite_to_new_ssa_names (region, n_region, ssa_name_map);
5086 allocate_ssa_names (definitions, &ssa_name_map);
5087 rewrite_to_new_ssa_names (region_copy, n_region, ssa_name_map);
5088 htab_delete (ssa_name_map);
5090 if (free_region_copy)
5091 free (region_copy);
5093 unmark_all_for_rewrite ();
5094 BITMAP_FREE (definitions);
5096 return true;
5099 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h). */
5101 void
5102 dump_function_to_file (tree fn, FILE *file, int flags)
5104 tree arg, vars, var;
5105 bool ignore_topmost_bind = false, any_var = false;
5106 basic_block bb;
5107 tree chain;
5109 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5111 arg = DECL_ARGUMENTS (fn);
5112 while (arg)
5114 print_generic_expr (file, arg, dump_flags);
5115 if (TREE_CHAIN (arg))
5116 fprintf (file, ", ");
5117 arg = TREE_CHAIN (arg);
5119 fprintf (file, ")\n");
5121 if (flags & TDF_RAW)
5123 dump_node (fn, TDF_SLIM | flags, file);
5124 return;
5127 /* When GIMPLE is lowered, the variables are no longer available in
5128 BIND_EXPRs, so display them separately. */
5129 if (cfun && cfun->unexpanded_var_list)
5131 ignore_topmost_bind = true;
5133 fprintf (file, "{\n");
5134 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5136 var = TREE_VALUE (vars);
5138 print_generic_decl (file, var, flags);
5139 fprintf (file, "\n");
5141 any_var = true;
5145 if (basic_block_info)
5147 /* Make a CFG based dump. */
5148 check_bb_profile (ENTRY_BLOCK_PTR, file);
5149 if (!ignore_topmost_bind)
5150 fprintf (file, "{\n");
5152 if (any_var && n_basic_blocks)
5153 fprintf (file, "\n");
5155 FOR_EACH_BB (bb)
5156 dump_generic_bb (file, bb, 2, flags);
5158 fprintf (file, "}\n");
5159 check_bb_profile (EXIT_BLOCK_PTR, file);
5161 else
5163 int indent;
5165 /* Make a tree based dump. */
5166 chain = DECL_SAVED_TREE (fn);
5168 if (TREE_CODE (chain) == BIND_EXPR)
5170 if (ignore_topmost_bind)
5172 chain = BIND_EXPR_BODY (chain);
5173 indent = 2;
5175 else
5176 indent = 0;
5178 else
5180 if (!ignore_topmost_bind)
5181 fprintf (file, "{\n");
5182 indent = 2;
5185 if (any_var)
5186 fprintf (file, "\n");
5188 print_generic_stmt_indented (file, chain, flags, indent);
5189 if (ignore_topmost_bind)
5190 fprintf (file, "}\n");
5193 fprintf (file, "\n\n");
5197 /* Pretty printing of the intermediate representation of loops. */
5198 static void print_loop (FILE *, struct loop *, int);
5199 static void print_pred_bbs (FILE *, basic_block bb);
5200 static void print_succ_bbs (FILE *, basic_block bb);
5203 /* Print to FILE the indexes of the predecessors of basic block BB. */
5205 static void
5206 print_pred_bbs (FILE *file, basic_block bb)
5208 edge e;
5209 edge_iterator ei;
5211 FOR_EACH_EDGE (e, ei, bb->preds)
5212 fprintf (file, "bb_%d", e->src->index);
5216 /* Print to FILE the indexes of the successors of basic block BB. */
5218 static void
5219 print_succ_bbs (FILE *file, basic_block bb)
5221 edge e;
5222 edge_iterator ei;
5224 FOR_EACH_EDGE (e, ei, bb->succs)
5225 fprintf (file, "bb_%d", e->dest->index);
5229 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5231 static void
5232 print_loop (FILE *file, struct loop *loop, int indent)
5234 char *s_indent;
5235 basic_block bb;
5237 if (loop == NULL)
5238 return;
5240 s_indent = (char *) alloca ((size_t) indent + 1);
5241 memset ((void *) s_indent, ' ', (size_t) indent);
5242 s_indent[indent] = '\0';
5244 /* Print the loop's header. */
5245 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5247 /* Print the loop's body. */
5248 fprintf (file, "%s{\n", s_indent);
5249 FOR_EACH_BB (bb)
5250 if (bb->loop_father == loop)
5252 /* Print the basic_block's header. */
5253 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5254 print_pred_bbs (file, bb);
5255 fprintf (file, "}, succs = {");
5256 print_succ_bbs (file, bb);
5257 fprintf (file, "})\n");
5259 /* Print the basic_block's body. */
5260 fprintf (file, "%s {\n", s_indent);
5261 tree_dump_bb (bb, file, indent + 4);
5262 fprintf (file, "%s }\n", s_indent);
5265 print_loop (file, loop->inner, indent + 2);
5266 fprintf (file, "%s}\n", s_indent);
5267 print_loop (file, loop->next, indent);
5271 /* Pretty print on FILE the loop structure of the current function,
5272 starting from the loop that contains the first basic block. */
5274 void
5275 print_loop_ir (FILE *file)
5277 basic_block bb;
5279 bb = BASIC_BLOCK (0);
5280 if (bb && bb->loop_father)
5281 print_loop (file, bb->loop_father, 0);
5285 /* Debug the loop structure at the tree level (prints to stderr). */
5287 void
5288 debug_loop_ir (void)
5290 print_loop_ir (stderr);
5294 /* Return true if BB ends with a call, possibly followed by some
5295 instructions that must stay with the call. Return false
5296 otherwise. */
5298 static bool
5299 tree_block_ends_with_call_p (basic_block bb)
5301 block_stmt_iterator bsi = bsi_last (bb);
5302 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5306 /* Return true if BB ends with a conditional branch. Return false
5307 otherwise. */
5309 static bool
5310 tree_block_ends_with_condjump_p (basic_block bb)
5312 tree stmt = tsi_stmt (bsi_last (bb).tsi);
5313 return (TREE_CODE (stmt) == COND_EXPR);
5317 /* Return true if we need to add a fake edge to exit at statement T.
5318 Helper function for tree_flow_call_edges_add. */
5320 static bool
5321 need_fake_edge_p (tree t)
5323 tree call;
5325 /* NORETURN and LONGJMP calls already have an edge to exit.
5326 CONST and PURE calls do not need one.
5327 We don't currently check for CONST and PURE here, although
5328 it would be a good idea, because those attributes are
5329 figured out from the RTL in mark_constant_function, and
5330 the counter incrementation code from -fprofile-arcs
5331 leads to different results from -fbranch-probabilities. */
5332 call = get_call_expr_in (t);
5333 if (call
5334 && !(call_expr_flags (call) & ECF_NORETURN))
5335 return true;
5337 if (TREE_CODE (t) == ASM_EXPR
5338 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5339 return true;
5341 return false;
5345 /* Add fake edges to the function exit for any non-constant, non-noreturn
5346 calls and volatile inline assembly in the bitmap of blocks specified by
5347 BLOCKS, or in the whole CFG if BLOCKS is zero. Return
5348 the number of blocks that were split.
5350 The goal is to expose cases in which entering a basic block does
5351 not imply that all subsequent instructions must be executed. */
5353 static int
5354 tree_flow_call_edges_add (sbitmap blocks)
5356 int i;
5357 int blocks_split = 0;
5358 int last_bb = last_basic_block;
5359 bool check_last_block = false;
5361 if (n_basic_blocks == 0)
5362 return 0;
5364 if (! blocks)
5365 check_last_block = true;
5366 else
5367 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5369 /* In the last basic block, before epilogue generation, there will be
5370 a fallthru edge to EXIT. Special care is required if the last insn
5371 of the last basic block is a call because make_edge folds duplicate
5372 edges, which would result in the fallthru edge also being marked
5373 fake, which would result in the fallthru edge being removed by
5374 remove_fake_edges, which would result in an invalid CFG.
5376 Moreover, we can't elide the outgoing fake edge, since the block
5377 profiler needs to take this into account in order to solve the minimal
5378 spanning tree in the case that the call doesn't return.
5380 Handle this by adding a dummy instruction in a new last basic block. */
5381 if (check_last_block)
5383 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5384 block_stmt_iterator bsi = bsi_last (bb);
5385 tree t = NULL_TREE;
5386 if (!bsi_end_p (bsi))
5387 t = bsi_stmt (bsi);
5389 if (need_fake_edge_p (t))
5391 edge e;
5393 e = find_edge (bb, EXIT_BLOCK_PTR);
5394 if (e)
5396 bsi_insert_on_edge (e, build_empty_stmt ());
5397 bsi_commit_edge_inserts ();
5402 /* Now add fake edges to the function exit for any non-constant
5403 calls since there is no way that we can determine if they will
5404 return or not... */
5405 for (i = 0; i < last_bb; i++)
5407 basic_block bb = BASIC_BLOCK (i);
5408 block_stmt_iterator bsi;
5409 tree stmt, last_stmt;
5411 if (!bb)
5412 continue;
5414 if (blocks && !TEST_BIT (blocks, i))
5415 continue;
5417 bsi = bsi_last (bb);
5418 if (!bsi_end_p (bsi))
5420 last_stmt = bsi_stmt (bsi);
5423 stmt = bsi_stmt (bsi);
5424 if (need_fake_edge_p (stmt))
5426 edge e;
5427 /* The handling above of the final block before the
5428 epilogue should be enough to verify that there is
5429 no edge to the exit block in CFG already.
5430 Calling make_edge in such case would cause us to
5431 mark that edge as fake and remove it later. */
5432 #ifdef ENABLE_CHECKING
5433 if (stmt == last_stmt)
5435 e = find_edge (bb, EXIT_BLOCK_PTR);
5436 gcc_assert (e == NULL);
5438 #endif
5440 /* Note that the following may create a new basic block
5441 and renumber the existing basic blocks. */
5442 if (stmt != last_stmt)
5444 e = split_block (bb, stmt);
5445 if (e)
5446 blocks_split++;
5448 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5450 bsi_prev (&bsi);
5452 while (!bsi_end_p (bsi));
5456 if (blocks_split)
5457 verify_flow_info ();
5459 return blocks_split;
5462 bool
5463 tree_purge_dead_eh_edges (basic_block bb)
5465 bool changed = false;
5466 edge e;
5467 edge_iterator ei;
5468 tree stmt = last_stmt (bb);
5470 if (stmt && tree_can_throw_internal (stmt))
5471 return false;
5473 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5475 if (e->flags & EDGE_EH)
5477 remove_edge (e);
5478 changed = true;
5480 else
5481 ei_next (&ei);
5484 /* Removal of dead EH edges might change dominators of not
5485 just immediate successors. E.g. when bb1 is changed so that
5486 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5487 eh edges purged by this function in:
5491       1-->2
5492      / \  |
5493     v   v |
5494     3-->4 |
5495      \    |
5496       --->5
5499 idom(bb5) must be recomputed. For now just free the dominance
5500 info. */
5501 if (changed)
5502 free_dominance_info (CDI_DOMINATORS);
5504 return changed;
5507 bool
5508 tree_purge_all_dead_eh_edges (bitmap blocks)
5510 bool changed = false;
5511 unsigned i;
5512 bitmap_iterator bi;
5514 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5516 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5519 return changed;
5522 /* This function is called whenever a new edge is created or
5523 redirected. */
5525 static void
5526 tree_execute_on_growing_pred (edge e)
5528 basic_block bb = e->dest;
5530 if (phi_nodes (bb))
5531 reserve_phi_args_for_new_edge (bb);
5534 /* This function is called immediately before edge E is removed from
5535 the edge vector E->dest->preds. */
5537 static void
5538 tree_execute_on_shrinking_pred (edge e)
5540 if (phi_nodes (e->dest))
5541 remove_phi_args (e);
5544 struct cfg_hooks tree_cfg_hooks = {
5545 "tree",
5546 tree_verify_flow_info,
5547 tree_dump_bb, /* dump_bb */
5548 create_bb, /* create_basic_block */
5549 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5550 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5551 remove_bb, /* delete_basic_block */
5552 tree_split_block, /* split_block */
5553 tree_move_block_after, /* move_block_after */
5554 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5555 tree_merge_blocks, /* merge_blocks */
5556 tree_predict_edge, /* predict_edge */
5557 tree_predicted_by_p, /* predicted_by_p */
5558 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5559 tree_duplicate_bb, /* duplicate_block */
5560 tree_split_edge, /* split_edge */
5561 tree_make_forwarder_block, /* make_forwarder_block */
5562 NULL, /* tidy_fallthru_edge */
5563 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5564 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5565 tree_flow_call_edges_add, /* flow_call_edges_add */
5566 tree_execute_on_growing_pred, /* execute_on_growing_pred */
5567 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5571 /* Split all critical edges. */
5573 static void
5574 split_critical_edges (void)
5576 basic_block bb;
5577 edge e;
5578 edge_iterator ei;
5580 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5581 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5582 mappings around the calls to split_edge. */
5583 start_recording_case_labels ();
5584 FOR_ALL_BB (bb)
5586 FOR_EACH_EDGE (e, ei, bb->succs)
5587 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5589 split_edge (e);
5592 end_recording_case_labels ();
5595 struct tree_opt_pass pass_split_crit_edges =
5597 "crited", /* name */
5598 NULL, /* gate */
5599 split_critical_edges, /* execute */
5600 NULL, /* sub */
5601 NULL, /* next */
5602 0, /* static_pass_number */
5603 TV_TREE_SPLIT_EDGES, /* tv_id */
5604 PROP_cfg, /* properties_required */
5605 PROP_no_crit_edges, /* properties_provided */
5606 0, /* properties_destroyed */
5607 0, /* todo_flags_start */
5608 TODO_dump_func, /* todo_flags_finish */
5609 0 /* letter */
5613 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5614 a temporary, registering it to be renamed if necessary,
5615 and finally return the temporary. Put the statements to compute
5616 EXP before the current statement in BSI. */
5618 tree
5619 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5621 tree t, new_stmt, orig_stmt;
5623 if (is_gimple_val (exp))
5624 return exp;
5626 t = make_rename_temp (type, NULL);
5627 new_stmt = build (MODIFY_EXPR, type, t, exp);
5629 orig_stmt = bsi_stmt (*bsi);
5630 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5631 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5633 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5635 return t;
5638 /* Build a ternary operation and gimplify it. Emit code before BSI.
5639 Return the gimple_val holding the result. */
5641 tree
5642 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5643 tree type, tree a, tree b, tree c)
5645 tree ret;
5647 ret = fold (build3 (code, type, a, b, c));
5648 STRIP_NOPS (ret);
5650 return gimplify_val (bsi, type, ret);
5653 /* Build a binary operation and gimplify it. Emit code before BSI.
5654 Return the gimple_val holding the result. */
5656 tree
5657 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5658 tree type, tree a, tree b)
5660 tree ret;
5662 ret = fold (build2 (code, type, a, b));
5663 STRIP_NOPS (ret);
5665 return gimplify_val (bsi, type, ret);
5668 /* Build a unary operation and gimplify it. Emit code before BSI.
5669 Return the gimple_val holding the result. */
5671 tree
5672 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5673 tree a)
5675 tree ret;
5677 ret = fold (build1 (code, type, a));
5678 STRIP_NOPS (ret);
5680 return gimplify_val (bsi, type, ret);
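/* Usage sketch for the gimplify_build* helpers above (hypothetical caller;
   BSI points at the statement before which code may be emitted, and A and B
   are GIMPLE values of type TYPE):

     tree sum = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);
     tree neg = gimplify_build1 (bsi, NEGATE_EXPR, type, sum);

   Each call folds the expression and, when the result is not already a
   GIMPLE value, emits an assignment to a fresh temporary before BSI, so
   SUM and NEG are valid GIMPLE rvalues afterwards.  */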
5685 /* Emit return warnings. */
5687 static void
5688 execute_warn_function_return (void)
5690 #ifdef USE_MAPPED_LOCATION
5691 source_location location;
5692 #else
5693 location_t *locus;
5694 #endif
5695 tree last;
5696 edge e;
5697 edge_iterator ei;
5699 if (warn_missing_noreturn
5700 && !TREE_THIS_VOLATILE (cfun->decl)
5701 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5702 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5703 warning ("%Jfunction might be possible candidate for "
5704 "attribute %<noreturn%>",
5705 cfun->decl);
5707 /* If we have a path to EXIT, then we do return. */
5708 if (TREE_THIS_VOLATILE (cfun->decl)
5709 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5711 #ifdef USE_MAPPED_LOCATION
5712 location = UNKNOWN_LOCATION;
5713 #else
5714 locus = NULL;
5715 #endif
5716 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5718 last = last_stmt (e->src);
5719 if (TREE_CODE (last) == RETURN_EXPR
5720 #ifdef USE_MAPPED_LOCATION
5721 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5722 #else
5723 && (locus = EXPR_LOCUS (last)) != NULL)
5724 #endif
5725 break;
5727 #ifdef USE_MAPPED_LOCATION
5728 if (location == UNKNOWN_LOCATION)
5729 location = cfun->function_end_locus;
5730 warning ("%H%<noreturn%> function does return", &location);
5731 #else
5732 if (!locus)
5733 locus = &cfun->function_end_locus;
5734 warning ("%H%<noreturn%> function does return", locus);
5735 #endif
5738 /* If we see "return;" in some basic block, then we do reach the end
5739 without returning a value. */
5740 else if (warn_return_type
5741 && !TREE_NO_WARNING (cfun->decl)
5742 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5743 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5745 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5747 tree last = last_stmt (e->src);
5748 if (TREE_CODE (last) == RETURN_EXPR
5749 && TREE_OPERAND (last, 0) == NULL)
5751 #ifdef USE_MAPPED_LOCATION
5752 location = EXPR_LOCATION (last);
5753 if (location == UNKNOWN_LOCATION)
5754 location = cfun->function_end_locus;
5755 warning ("%Hcontrol reaches end of non-void function", &location);
5756 #else
5757 locus = EXPR_LOCUS (last);
5758 if (!locus)
5759 locus = &cfun->function_end_locus;
5760 warning ("%Hcontrol reaches end of non-void function", locus);
5761 #endif
5762 TREE_NO_WARNING (cfun->decl) = 1;
5763 break;
5770 /* Given a basic block B which ends with a conditional and has
5771 precisely two successors, determine which of the edges is taken if
5772 the conditional is true and which is taken if the conditional is
5773 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
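/* A typical (hypothetical) caller:

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);

   COND_BB must end in a COND_EXPR and have exactly two successor edges,
   one marked EDGE_TRUE_VALUE and the other EDGE_FALSE_VALUE.  */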
5775 void
5776 extract_true_false_edges_from_block (basic_block b,
5777 edge *true_edge,
5778 edge *false_edge)
5780 edge e = EDGE_SUCC (b, 0);
5782 if (e->flags & EDGE_TRUE_VALUE)
5784 *true_edge = e;
5785 *false_edge = EDGE_SUCC (b, 1);
5787 else
5789 *false_edge = e;
5790 *true_edge = EDGE_SUCC (b, 1);
5794 struct tree_opt_pass pass_warn_function_return =
5796 NULL, /* name */
5797 NULL, /* gate */
5798 execute_warn_function_return, /* execute */
5799 NULL, /* sub */
5800 NULL, /* next */
5801 0, /* static_pass_number */
5802 0, /* tv_id */
5803 PROP_cfg, /* properties_required */
5804 0, /* properties_provided */
5805 0, /* properties_destroyed */
5806 0, /* todo_flags_start */
5807 0, /* todo_flags_finish */
5808 0 /* letter */
5811 #include "gt-tree-cfg.h"