/* Tail merging for gimple.
   Copyright (C) 2011-2013 Free Software Foundation, Inc.
   Contributed by Tom de Vries (tom@codesourcery.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Pass overview.

   MOTIVATIONAL EXAMPLE

   gimple representation of gcc/testsuite/gcc.dg/pr43864.c at

   hprofStartupp (charD.1 * outputFileNameD.2600, charD.1 * ctxD.2601)
   {
     struct FILED.1638 * fpD.2605;
     charD.1 fileNameD.2604[1000];
     intD.0 D.3915;
     const charD.1 * restrict outputFileName.0D.3914;

     # BLOCK 2 freq:10000
     # PRED: ENTRY [100.0%]  (fallthru,exec)
     # PT = nonlocal { D.3926 } (restr)
     outputFileName.0D.3914_3
       = (const charD.1 * restrict) outputFileNameD.2600_2(D);
     # .MEMD.3923_13 = VDEF <.MEMD.3923_12(D)>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     sprintfD.759 (&fileNameD.2604, outputFileName.0D.3914_3);
     # .MEMD.3923_14 = VDEF <.MEMD.3923_13>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     D.3915_4 = accessD.2606 (&fileNameD.2604, 1);
     if (D.3915_4 == 0)
       goto <bb 3>;
     else
       goto <bb 4>;
     # SUCC: 3 [10.0%]  (true,exec) 4 [90.0%]  (false,exec)

     # BLOCK 3 freq:1000
     # PRED: 2 [10.0%]  (true,exec)
     # .MEMD.3923_15 = VDEF <.MEMD.3923_14>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     freeD.898 (ctxD.2601_5(D));
     goto <bb 7>;
     # SUCC: 7 [100.0%]  (fallthru,exec)

     # BLOCK 4 freq:9000
     # PRED: 2 [90.0%]  (false,exec)
     # .MEMD.3923_16 = VDEF <.MEMD.3923_14>
     # PT = nonlocal escaped
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     fpD.2605_8 = fopenD.1805 (&fileNameD.2604[0], 0B);
     if (fpD.2605_8 == 0B)
       goto <bb 5>;
     else
       goto <bb 6>;
     # SUCC: 5 [1.9%]  (true,exec) 6 [98.1%]  (false,exec)

     # BLOCK 5 freq:173
     # PRED: 4 [1.9%]  (true,exec)
     # .MEMD.3923_17 = VDEF <.MEMD.3923_16>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     freeD.898 (ctxD.2601_5(D));
     goto <bb 7>;
     # SUCC: 7 [100.0%]  (fallthru,exec)

     # BLOCK 6 freq:8827
     # PRED: 4 [98.1%]  (false,exec)
     # .MEMD.3923_18 = VDEF <.MEMD.3923_16>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     fooD.2599 (outputFileNameD.2600_2(D), fpD.2605_8);
     # SUCC: 7 [100.0%]  (fallthru,exec)

     # BLOCK 7 freq:10000
     # PRED: 3 [100.0%]  (fallthru,exec) 5 [100.0%]  (fallthru,exec)
             6 [100.0%]  (fallthru,exec)
     # PT = nonlocal null

     # ctxD.2601_1 = PHI <0B(3), 0B(5), ctxD.2601_5(D)(6)>
     # .MEMD.3923_11 = PHI <.MEMD.3923_15(3), .MEMD.3923_17(5),
                            .MEMD.3923_18(6)>
     # VUSE <.MEMD.3923_11>
     return ctxD.2601_1;
     # SUCC: EXIT [100.0%]
   }

   bb 3 and bb 5 can be merged.  The blocks have different predecessors, but
   the same successors, and the same operations.

   CONTEXT

   A technique called tail merging (or cross jumping) can fix the example
   above.  For a block, we look for common code at the end (the tail) of the
   predecessor blocks, and insert jumps from one block to the other.
   The example is a special case for tail merging, in that 2 whole blocks
   can be merged, rather than just their ends.
   We currently only focus on whole block merging, so in that sense
   calling this pass tail merge is a bit of a misnomer.
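
   For reference, C source of roughly the shape that produces the gimple
   above looks as follows (a sketch reconstructed from the dump above, not
   copied from the testsuite):

     char *
     hprofStartupp (char *outputFileName, char *ctx)
     {
       char fileName[1000];
       FILE *fp;

       sprintf (fileName, outputFileName);
       if (access (fileName, 1) == 0)
	 {
	   free (ctx);		/* becomes bb 3 */
	   return 0;
	 }
       fp = fopen (fileName, 0);
       if (fp == 0)
	 {
	   free (ctx);		/* becomes bb 5, identical to bb 3 */
	   return 0;
	 }
       foo (outputFileName, fp);
       return ctx;
     }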

   We distinguish 2 kinds of situations in which blocks can be merged:
   - same operations, same predecessors.  The successor edges coming from one
     block are redirected to come from the other block.
   - same operations, same successors.  The predecessor edges entering one
     block are redirected to enter the other block.  Note that this operation
     might involve introducing phi operations.

   For efficient implementation, we would like to value number the blocks, and
   have a comparison operator that tells us whether the blocks are equal.
   Besides being runtime efficient, block value numbering should also abstract
   from irrelevant differences in order of operations, much like normal value
   numbering abstracts from irrelevant order of operations.

   For the first situation (same operations, same predecessors), normal value
   numbering fits well.  We can calculate a block value number based on the
   value numbers of the defs and vdefs.

   For the second situation (same operations, same successors), this approach
   doesn't work so well.  We can illustrate this using the example.  The calls
   to free use different vuses: .MEMD.3923_14 and .MEMD.3923_16, and these
   will remain different in value numbering, since they represent different
   memory states.  So the resulting vdefs of the frees will be different in
   value numbering, so the block value numbers will be different.

   The reason why we call the blocks equal is not because they define the same
   values, but because uses in the blocks use (possibly different) defs in the
   same way.  To be able to detect this efficiently, we need to do some kind
   of reverse value numbering, meaning number the uses rather than the defs,
   and calculate a block value number based on the value numbers of the uses.
   Ideally, a block comparison operator will also indicate which phis are
   needed to merge the blocks.

   For the moment, we don't do block value numbering, but we do insn-by-insn
   matching, using SCC value numbers to match operations with results, and
   structural comparison otherwise, while ignoring vop mismatches.
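
   Concretely, in the example the two calls match statement-by-statement even
   though their virtual operands differ:

     freeD.898 (ctxD.2601_5(D));  # .MEMD.3923_15 = VDEF <.MEMD.3923_14>  (bb 3)
     freeD.898 (ctxD.2601_5(D));  # .MEMD.3923_17 = VDEF <.MEMD.3923_16>  (bb 5)

   The call targets and the argument ctxD.2601_5(D) compare equal; only the
   vuse/vdef pairs differ, and those mismatches are ignored.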

   IMPLEMENTATION

   1. The pass first determines all groups of blocks with the same successor
      blocks.
   2. Within each group, it tries to determine clusters of equal basic blocks.
   3. The clusters are applied.
   4. The same successor groups are updated.
   5. This process is repeated from 2 onwards, until no more changes occur.
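
   In terms of the functions below: step 1 is find_same_succ (called from
   init_worklist), step 2 is find_clusters, step 3 is apply_clusters, and
   steps 4 and 5 are update_worklist and the surrounding loop in
   tail_merge_optimize.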

   LIMITATIONS/TODO

   - block only: we only merge whole blocks, not the common tails of blocks.
   - handles only 'same operations, same successors'.
     It handles same predecessors as a special subcase though.
   - does not implement the reverse value numbering and block value numbering.
   - improve memory allocation: use garbage collected memory, obstacks,
     allocpools where appropriate.
   - no insertion of gimple_reg phis.  We only introduce vop-phis.
   - handle blocks with gimple_reg phi_nodes.

   SWITCHES

   - ftree-tail-merge.  On at -O2.  We may have to enable it only at -Os.  */
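
/* For instance (command-line spellings assumed from the option and param
   identifiers used in this file, not taken from its text):

     gcc -O2 -fno-tree-tail-merge foo.c
     gcc -O2 --param max-tail-merge-comparisons=4 \
	     --param max-tail-merge-iterations=1 foo.c

   disable the pass, resp. tighten PARAM_MAX_TAIL_MERGE_COMPARISONS and
   PARAM_MAX_TAIL_MERGE_ITERATIONS, which bound the work done below.  */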

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "tree-flow.h"
#include "bitmap.h"
#include "tree-ssa-alias.h"
#include "params.h"
#include "hash-table.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-sccvn.h"
#include "tree-dump.h"
#include "cfgloop.h"

/* ??? This currently runs as part of tree-ssa-pre.  Why is this not
   a stand-alone GIMPLE pass?  */
#include "tree-pass.h"

/* Describes a group of bbs with the same successors.  The successor bbs are
   cached in succs, and the successor edge flags are cached in succ_flags.
   If a bb has the EDGE_TRUE/FALSE_VALUE flags swapped compared to succ_flags,
   it's marked in inverse.
   Additionally, the hash value for the struct is cached in hashval, and
   in_worklist indicates whether it's currently part of worklist.  */

struct same_succ_def
{
  /* The bbs that have the same successor bbs.  */
  bitmap bbs;
  /* The successor bbs.  */
  bitmap succs;
  /* Indicates whether the EDGE_TRUE/FALSE_VALUEs of succ_flags are swapped
     for bb.  */
  bitmap inverse;
  /* The edge flags for each of the successor bbs.  */
  vec<int> succ_flags;
  /* Indicates whether the struct is currently in the worklist.  */
  bool in_worklist;
  /* The hash value of the struct.  */
  hashval_t hashval;

  /* hash_table support.  */
  typedef same_succ_def value_type;
  typedef same_succ_def compare_type;
  static inline hashval_t hash (const value_type *);
  static int equal (const value_type *, const compare_type *);
  static void remove (value_type *);
};
typedef struct same_succ_def *same_succ;
typedef const struct same_succ_def *const_same_succ;

/* hash routine for hash_table support, returns hashval of E.  */

inline hashval_t
same_succ_def::hash (const value_type *e)
{
  return e->hashval;
}

/* A group of bbs where 1 bb from bbs can replace the other bbs.  */

struct bb_cluster_def
{
  /* The bbs in the cluster.  */
  bitmap bbs;
  /* The preds of the bbs in the cluster.  */
  bitmap preds;
  /* Index in all_clusters vector.  */
  int index;
  /* The bb to replace the cluster with.  */
  basic_block rep_bb;
};
typedef struct bb_cluster_def *bb_cluster;
typedef const struct bb_cluster_def *const_bb_cluster;

/* Per bb-info.  */

struct aux_bb_info
{
  /* The number of non-debug statements in the bb.  */
  int size;
  /* The same_succ that this bb is a member of.  */
  same_succ bb_same_succ;
  /* The cluster that this bb is a member of.  */
  bb_cluster cluster;
  /* The vop state at the exit of a bb.  This is shortlived data, used to
     communicate data between update_block_by and update_vuses.  */
  tree vop_at_exit;
  /* The bb that either contains or is dominated by the dependencies of the
     bb.  */
  basic_block dep_bb;
};

/* Macros to access the fields of struct aux_bb_info.  */

#define BB_SIZE(bb) (((struct aux_bb_info *)bb->aux)->size)
#define BB_SAME_SUCC(bb) (((struct aux_bb_info *)bb->aux)->bb_same_succ)
#define BB_CLUSTER(bb) (((struct aux_bb_info *)bb->aux)->cluster)
#define BB_VOP_AT_EXIT(bb) (((struct aux_bb_info *)bb->aux)->vop_at_exit)
#define BB_DEP_BB(bb) (((struct aux_bb_info *)bb->aux)->dep_bb)
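
/* These macros assume bb->aux points at a struct aux_bb_info; that storage
   is set up by alloc_aux_for_blocks in init_worklist and released by
   free_aux_for_blocks in delete_worklist, so the accessors are only valid
   in between those calls.  */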

/* Returns true if the only effect a statement STMT has is to define locally
   used SSA_NAMEs.  */

static bool
stmt_local_def (gimple stmt)
{
  basic_block bb, def_bb;
  imm_use_iterator iter;
  use_operand_p use_p;
  tree val;
  def_operand_p def_p;

  if (gimple_has_side_effects (stmt))
    return false;

  def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
  if (def_p == NULL)
    return false;

  val = DEF_FROM_PTR (def_p);
  if (val == NULL_TREE || TREE_CODE (val) != SSA_NAME)
    return false;

  def_bb = gimple_bb (stmt);

  FOR_EACH_IMM_USE_FAST (use_p, iter, val)
    {
      if (is_gimple_debug (USE_STMT (use_p)))
	continue;
      bb = gimple_bb (USE_STMT (use_p));
      if (bb == def_bb)
	continue;

      if (gimple_code (USE_STMT (use_p)) == GIMPLE_PHI
	  && EDGE_PRED (bb, PHI_ARG_INDEX_FROM_USE (use_p))->src == def_bb)
	continue;

      return false;
    }

  return true;
}

/* Let GSI skip forwards over local defs.  */

static void
gsi_advance_fw_nondebug_nonlocal (gimple_stmt_iterator *gsi)
{
  gimple stmt;

  while (true)
    {
      if (gsi_end_p (*gsi))
	return;
      stmt = gsi_stmt (*gsi);
      if (!stmt_local_def (stmt))
	return;
      gsi_next_nondebug (gsi);
    }
}

/* VAL1 and VAL2 are either:
   - uses in BB1 and BB2, or
   - phi alternatives for BB1 and BB2.
   Return true if the uses have the same gvn value.  */

static bool
gvn_uses_equal (tree val1, tree val2)
{
  gcc_checking_assert (val1 != NULL_TREE && val2 != NULL_TREE);

  if (val1 == val2)
    return true;

  if (vn_valueize (val1) != vn_valueize (val2))
    return false;

  return ((TREE_CODE (val1) == SSA_NAME || CONSTANT_CLASS_P (val1))
	  && (TREE_CODE (val2) == SSA_NAME || CONSTANT_CLASS_P (val2)));
}
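
/* For example (hypothetical SSA names): if SCCVN has determined that j_7 and
   k_9 both valueize to the same value, say the constant 5, then
   gvn_uses_equal (j_7, k_9) returns true; a vn_valueize mismatch, or an
   operand that is neither an SSA_NAME nor a constant, makes it return
   false.  */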

/* Prints E to FILE.  */

static void
same_succ_print (FILE *file, const same_succ e)
{
  unsigned int i;
  bitmap_print (file, e->bbs, "bbs:", "\n");
  bitmap_print (file, e->succs, "succs:", "\n");
  bitmap_print (file, e->inverse, "inverse:", "\n");
  fprintf (file, "flags:");
  for (i = 0; i < e->succ_flags.length (); ++i)
    fprintf (file, " %x", e->succ_flags[i]);
  fprintf (file, "\n");
}

/* Prints same_succ *PE to FILE.  */

inline int
ssa_same_succ_print_traverse (same_succ *pe, FILE *file)
{
  const same_succ e = *pe;
  same_succ_print (file, e);
  return 1;
}

/* Update BB_DEP_BB (USE_BB), given a use of VAL in USE_BB.  */

static void
update_dep_bb (basic_block use_bb, tree val)
{
  basic_block dep_bb;

  /* Not a dep.  */
  if (TREE_CODE (val) != SSA_NAME)
    return;

  /* Skip use of global def.  */
  if (SSA_NAME_IS_DEFAULT_DEF (val))
    return;

  /* Skip use of local def.  */
  dep_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
  if (dep_bb == use_bb)
    return;

  if (BB_DEP_BB (use_bb) == NULL
      || dominated_by_p (CDI_DOMINATORS, dep_bb, BB_DEP_BB (use_bb)))
    BB_DEP_BB (use_bb) = dep_bb;
}

/* Update BB_DEP_BB, given the dependencies in STMT.  */

static void
stmt_update_dep_bb (gimple stmt)
{
  ssa_op_iter iter;
  use_operand_p use;

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    update_dep_bb (gimple_bb (stmt), USE_FROM_PTR (use));
}

/* Calculates hash value for same_succ VE.  */

static hashval_t
same_succ_hash (const_same_succ e)
{
  hashval_t hashval = bitmap_hash (e->succs);
  int flags;
  unsigned int i;
  unsigned int first = bitmap_first_set_bit (e->bbs);
  basic_block bb = BASIC_BLOCK (first);
  int size = 0;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree arg;
  unsigned int s;
  bitmap_iterator bs;

  for (gsi = gsi_start_nondebug_bb (bb);
       !gsi_end_p (gsi); gsi_next_nondebug (&gsi))
    {
      stmt = gsi_stmt (gsi);
      stmt_update_dep_bb (stmt);
      if (stmt_local_def (stmt))
	continue;
      size++;

      hashval = iterative_hash_hashval_t (gimple_code (stmt), hashval);
      if (is_gimple_assign (stmt))
	hashval = iterative_hash_hashval_t (gimple_assign_rhs_code (stmt),
					    hashval);
      if (!is_gimple_call (stmt))
	continue;
      if (gimple_call_internal_p (stmt))
	hashval = iterative_hash_hashval_t
	  ((hashval_t) gimple_call_internal_fn (stmt), hashval);
      else
	hashval = iterative_hash_expr (gimple_call_fn (stmt), hashval);
      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  arg = gimple_call_arg (stmt, i);
	  arg = vn_valueize (arg);
	  hashval = iterative_hash_expr (arg, hashval);
	}
    }

  hashval = iterative_hash_hashval_t (size, hashval);
  BB_SIZE (bb) = size;

  for (i = 0; i < e->succ_flags.length (); ++i)
    {
      flags = e->succ_flags[i];
      flags = flags & ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
      hashval = iterative_hash_hashval_t (flags, hashval);
    }

  EXECUTE_IF_SET_IN_BITMAP (e->succs, 0, s, bs)
    {
      int n = find_edge (bb, BASIC_BLOCK (s))->dest_idx;
      for (gsi = gsi_start_phis (BASIC_BLOCK (s)); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  tree lhs = gimple_phi_result (phi);
	  tree val = gimple_phi_arg_def (phi, n);

	  if (virtual_operand_p (lhs))
	    continue;
	  update_dep_bb (bb, val);
	}
    }

  return hashval;
}

/* Returns true if E1 and E2 have 2 successors, and if the successor flags
   are inverse for the EDGE_TRUE_VALUE and EDGE_FALSE_VALUE flags, and equal
   for the other edge flags.  */

static bool
inverse_flags (const_same_succ e1, const_same_succ e2)
{
  int f1a, f1b, f2a, f2b;
  int mask = ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

  if (e1->succ_flags.length () != 2)
    return false;

  f1a = e1->succ_flags[0];
  f1b = e1->succ_flags[1];
  f2a = e2->succ_flags[0];
  f2b = e2->succ_flags[1];

  if (f1a == f2a && f1b == f2b)
    return false;

  return (f1a & mask) == (f2a & mask) && (f1b & mask) == (f2b & mask);
}
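
/* For example (made-up flag sets): with e1->succ_flags == { EDGE_TRUE_VALUE,
   EDGE_FALSE_VALUE } and e2->succ_flags == { EDGE_FALSE_VALUE,
   EDGE_TRUE_VALUE }, the pairs differ before masking but agree after the
   true/false bits are masked out, so inverse_flags returns true.  */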

/* Compares SAME_SUCCs E1 and E2.  */

int
same_succ_def::equal (const value_type *e1, const compare_type *e2)
{
  unsigned int i, first1, first2;
  gimple_stmt_iterator gsi1, gsi2;
  gimple s1, s2;
  basic_block bb1, bb2;

  if (e1->hashval != e2->hashval)
    return 0;

  if (e1->succ_flags.length () != e2->succ_flags.length ())
    return 0;

  if (!bitmap_equal_p (e1->succs, e2->succs))
    return 0;

  if (!inverse_flags (e1, e2))
    {
      for (i = 0; i < e1->succ_flags.length (); ++i)
	if (e1->succ_flags[i] != e2->succ_flags[i])
	  return 0;
    }

  first1 = bitmap_first_set_bit (e1->bbs);
  first2 = bitmap_first_set_bit (e2->bbs);

  bb1 = BASIC_BLOCK (first1);
  bb2 = BASIC_BLOCK (first2);

  if (BB_SIZE (bb1) != BB_SIZE (bb2))
    return 0;

  gsi1 = gsi_start_nondebug_bb (bb1);
  gsi2 = gsi_start_nondebug_bb (bb2);
  gsi_advance_fw_nondebug_nonlocal (&gsi1);
  gsi_advance_fw_nondebug_nonlocal (&gsi2);
  while (!(gsi_end_p (gsi1) || gsi_end_p (gsi2)))
    {
      s1 = gsi_stmt (gsi1);
      s2 = gsi_stmt (gsi2);
      if (gimple_code (s1) != gimple_code (s2))
	return 0;
      if (is_gimple_call (s1) && !gimple_call_same_target_p (s1, s2))
	return 0;
      gsi_next_nondebug (&gsi1);
      gsi_next_nondebug (&gsi2);
      gsi_advance_fw_nondebug_nonlocal (&gsi1);
      gsi_advance_fw_nondebug_nonlocal (&gsi2);
    }

  return 1;
}

/* Alloc and init a new SAME_SUCC.  */

static same_succ
same_succ_alloc (void)
{
  same_succ same = XNEW (struct same_succ_def);

  same->bbs = BITMAP_ALLOC (NULL);
  same->succs = BITMAP_ALLOC (NULL);
  same->inverse = BITMAP_ALLOC (NULL);
  same->succ_flags.create (10);
  same->in_worklist = false;

  return same;
}

/* Delete same_succ E.  */

void
same_succ_def::remove (same_succ e)
{
  BITMAP_FREE (e->bbs);
  BITMAP_FREE (e->succs);
  BITMAP_FREE (e->inverse);
  e->succ_flags.release ();

  XDELETE (e);
}

/* Reset same_succ SAME.  */

static void
same_succ_reset (same_succ same)
{
  bitmap_clear (same->bbs);
  bitmap_clear (same->succs);
  bitmap_clear (same->inverse);
  same->succ_flags.truncate (0);
}

static hash_table <same_succ_def> same_succ_htab;

/* Array that is used to store the edge flags for a successor.  */

static int *same_succ_edge_flags;

/* Bitmap that is used to mark bbs that are recently deleted.  */

static bitmap deleted_bbs;

/* Bitmap that is used to mark predecessors of bbs that are
   deleted.  */

static bitmap deleted_bb_preds;

/* Prints same_succ_htab to stderr.  */

extern void debug_same_succ (void);
DEBUG_FUNCTION void
debug_same_succ (void)
{
  same_succ_htab.traverse <FILE *, ssa_same_succ_print_traverse> (stderr);
}

/* Vector of bbs to process.  */

static vec<same_succ> worklist;

/* Prints worklist to FILE.  */

static void
print_worklist (FILE *file)
{
  unsigned int i;
  for (i = 0; i < worklist.length (); ++i)
    same_succ_print (file, worklist[i]);
}

/* Adds SAME to worklist.  */

static void
add_to_worklist (same_succ same)
{
  if (same->in_worklist)
    return;

  if (bitmap_count_bits (same->bbs) < 2)
    return;

  same->in_worklist = true;
  worklist.safe_push (same);
}

/* Add BB to same_succ_htab.  */

static void
find_same_succ_bb (basic_block bb, same_succ *same_p)
{
  unsigned int j;
  bitmap_iterator bj;
  same_succ same = *same_p;
  same_succ *slot;
  edge_iterator ei;
  edge e;

  if (bb == NULL)
    return;
  bitmap_set_bit (same->bbs, bb->index);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int index = e->dest->index;
      bitmap_set_bit (same->succs, index);
      same_succ_edge_flags[index] = e->flags;
    }
  EXECUTE_IF_SET_IN_BITMAP (same->succs, 0, j, bj)
    same->succ_flags.safe_push (same_succ_edge_flags[j]);

  same->hashval = same_succ_hash (same);

  slot = same_succ_htab.find_slot_with_hash (same, same->hashval, INSERT);
  if (*slot == NULL)
    {
      *slot = same;
      BB_SAME_SUCC (bb) = same;
      add_to_worklist (same);
      *same_p = NULL;
    }
  else
    {
      bitmap_set_bit ((*slot)->bbs, bb->index);
      BB_SAME_SUCC (bb) = *slot;
      add_to_worklist (*slot);
      if (inverse_flags (same, *slot))
	bitmap_set_bit ((*slot)->inverse, bb->index);
      same_succ_reset (same);
    }
}

/* Find bbs with same successors.  */

static void
find_same_succ (void)
{
  same_succ same = same_succ_alloc ();
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      find_same_succ_bb (bb, &same);
      if (same == NULL)
	same = same_succ_alloc ();
    }

  same_succ_def::remove (same);
}

/* Initializes worklist administration.  */

static void
init_worklist (void)
{
  alloc_aux_for_blocks (sizeof (struct aux_bb_info));
  same_succ_htab.create (n_basic_blocks);
  same_succ_edge_flags = XCNEWVEC (int, last_basic_block);
  deleted_bbs = BITMAP_ALLOC (NULL);
  deleted_bb_preds = BITMAP_ALLOC (NULL);
  worklist.create (n_basic_blocks);
  find_same_succ ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "initial worklist:\n");
      print_worklist (dump_file);
    }
}

/* Deletes worklist administration.  */

static void
delete_worklist (void)
{
  free_aux_for_blocks ();
  same_succ_htab.dispose ();
  XDELETEVEC (same_succ_edge_flags);
  same_succ_edge_flags = NULL;
  BITMAP_FREE (deleted_bbs);
  BITMAP_FREE (deleted_bb_preds);
  worklist.release ();
}

/* Mark BB as deleted, and mark its predecessors.  */

static void
mark_basic_block_deleted (basic_block bb)
{
  edge e;
  edge_iterator ei;

  bitmap_set_bit (deleted_bbs, bb->index);

  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_set_bit (deleted_bb_preds, e->src->index);
}

/* Removes BB from its corresponding same_succ.  */

static void
same_succ_flush_bb (basic_block bb)
{
  same_succ same = BB_SAME_SUCC (bb);
  BB_SAME_SUCC (bb) = NULL;
  if (bitmap_single_bit_set_p (same->bbs))
    same_succ_htab.remove_elt_with_hash (same, same->hashval);
  else
    bitmap_clear_bit (same->bbs, bb->index);
}

/* Removes all bbs in BBS from their corresponding same_succ.  */

static void
same_succ_flush_bbs (bitmap bbs)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (bbs, 0, i, bi)
    same_succ_flush_bb (BASIC_BLOCK (i));
}

/* Release the last vdef in BB, either normal or phi result.  */

static void
release_last_vdef (basic_block bb)
{
  gimple_stmt_iterator i;

  for (i = gsi_last_bb (bb); !gsi_end_p (i); gsi_prev_nondebug (&i))
    {
      gimple stmt = gsi_stmt (i);
      if (gimple_vdef (stmt) == NULL_TREE)
	continue;

      mark_virtual_operand_for_renaming (gimple_vdef (stmt));
      return;
    }

  for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
    {
      gimple phi = gsi_stmt (i);
      tree res = gimple_phi_result (phi);

      if (!virtual_operand_p (res))
	continue;

      mark_virtual_phi_result_for_renaming (phi);
      return;
    }
}

/* For deleted_bb_preds, find bbs with same successors.  */

static void
update_worklist (void)
{
  unsigned int i;
  bitmap_iterator bi;
  basic_block bb;
  same_succ same;

  bitmap_and_compl_into (deleted_bb_preds, deleted_bbs);
  bitmap_clear (deleted_bbs);

  bitmap_clear_bit (deleted_bb_preds, ENTRY_BLOCK);
  same_succ_flush_bbs (deleted_bb_preds);

  same = same_succ_alloc ();
  EXECUTE_IF_SET_IN_BITMAP (deleted_bb_preds, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      gcc_assert (bb != NULL);
      find_same_succ_bb (bb, &same);
      if (same == NULL)
	same = same_succ_alloc ();
    }
  same_succ_def::remove (same);
  bitmap_clear (deleted_bb_preds);
}

/* Prints cluster C to FILE.  */

static void
print_cluster (FILE *file, bb_cluster c)
{
  if (c == NULL)
    return;
  bitmap_print (file, c->bbs, "bbs:", "\n");
  bitmap_print (file, c->preds, "preds:", "\n");
}

/* Prints cluster C to stderr.  */

extern void debug_cluster (bb_cluster);
DEBUG_FUNCTION void
debug_cluster (bb_cluster c)
{
  print_cluster (stderr, c);
}

/* Update C->rep_bb, given that BB is added to the cluster.  */

static void
update_rep_bb (bb_cluster c, basic_block bb)
{
  /* Initial.  */
  if (c->rep_bb == NULL)
    {
      c->rep_bb = bb;
      return;
    }

  /* Current needs no deps, keep it.  */
  if (BB_DEP_BB (c->rep_bb) == NULL)
    return;

  /* Bb needs no deps, change rep_bb.  */
  if (BB_DEP_BB (bb) == NULL)
    {
      c->rep_bb = bb;
      return;
    }

  /* Bb needs last deps earlier than current, change rep_bb.  A potential
     problem with this, is that the first deps might also be earlier, which
     would mean we prefer longer lifetimes for the deps.  To be able to check
     for this, we would have to trace BB_FIRST_DEP_BB as well, besides
     BB_DEP_BB, which is really BB_LAST_DEP_BB.
     The benefit of choosing the bb with last deps earlier, is that it can
     potentially be used as replacement for more bbs.  */
  if (dominated_by_p (CDI_DOMINATORS, BB_DEP_BB (c->rep_bb), BB_DEP_BB (bb)))
    c->rep_bb = bb;
}

/* Add BB to cluster C.  Sets BB in C->bbs, and preds of BB in C->preds.  */

static void
add_bb_to_cluster (bb_cluster c, basic_block bb)
{
  edge e;
  edge_iterator ei;

  bitmap_set_bit (c->bbs, bb->index);

  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_set_bit (c->preds, e->src->index);

  update_rep_bb (c, bb);
}

/* Allocate and init new cluster.  */

static bb_cluster
new_cluster (void)
{
  bb_cluster c;
  c = XCNEW (struct bb_cluster_def);
  c->bbs = BITMAP_ALLOC (NULL);
  c->preds = BITMAP_ALLOC (NULL);
  c->rep_bb = NULL;
  return c;
}

/* Delete clusters.  */

static void
delete_cluster (bb_cluster c)
{
  if (c == NULL)
    return;
  BITMAP_FREE (c->bbs);
  BITMAP_FREE (c->preds);
  XDELETE (c);
}

/* Array that contains all clusters.  */

static vec<bb_cluster> all_clusters;

/* Allocate all cluster vectors.  */

static void
alloc_cluster_vectors (void)
{
  all_clusters.create (n_basic_blocks);
}

/* Reset all cluster vectors.  */

static void
reset_cluster_vectors (void)
{
  unsigned int i;
  basic_block bb;
  for (i = 0; i < all_clusters.length (); ++i)
    delete_cluster (all_clusters[i]);
  all_clusters.truncate (0);
  FOR_EACH_BB (bb)
    BB_CLUSTER (bb) = NULL;
}

/* Delete all cluster vectors.  */

static void
delete_cluster_vectors (void)
{
  unsigned int i;
  for (i = 0; i < all_clusters.length (); ++i)
    delete_cluster (all_clusters[i]);
  all_clusters.release ();
}

/* Merge cluster C2 into C1.  */

static void
merge_clusters (bb_cluster c1, bb_cluster c2)
{
  bitmap_ior_into (c1->bbs, c2->bbs);
  bitmap_ior_into (c1->preds, c2->preds);
}

/* Register equivalence of BB1 and BB2 (members of cluster C).  Store c in
   all_clusters, or merge c with existing cluster.  */

static void
set_cluster (basic_block bb1, basic_block bb2)
{
  basic_block merge_bb, other_bb;
  bb_cluster merge, old, c;

  if (BB_CLUSTER (bb1) == NULL && BB_CLUSTER (bb2) == NULL)
    {
      c = new_cluster ();
      add_bb_to_cluster (c, bb1);
      add_bb_to_cluster (c, bb2);
      BB_CLUSTER (bb1) = c;
      BB_CLUSTER (bb2) = c;
      c->index = all_clusters.length ();
      all_clusters.safe_push (c);
    }
  else if (BB_CLUSTER (bb1) == NULL || BB_CLUSTER (bb2) == NULL)
    {
      merge_bb = BB_CLUSTER (bb1) == NULL ? bb2 : bb1;
      other_bb = BB_CLUSTER (bb1) == NULL ? bb1 : bb2;
      merge = BB_CLUSTER (merge_bb);
      add_bb_to_cluster (merge, other_bb);
      BB_CLUSTER (other_bb) = merge;
    }
  else if (BB_CLUSTER (bb1) != BB_CLUSTER (bb2))
    {
      unsigned int i;
      bitmap_iterator bi;

      old = BB_CLUSTER (bb2);
      merge = BB_CLUSTER (bb1);
      merge_clusters (merge, old);
      EXECUTE_IF_SET_IN_BITMAP (old->bbs, 0, i, bi)
	BB_CLUSTER (BASIC_BLOCK (i)) = merge;
      all_clusters[old->index] = NULL;
      update_rep_bb (merge, old->rep_bb);
      delete_cluster (old);
    }
  else
    gcc_unreachable ();
}

/* Return true if gimple statements S1 and S2 are equal.  Gimple_bb (s1) and
   gimple_bb (s2) are members of SAME_SUCC.  */

static bool
gimple_equal_p (same_succ same_succ, gimple s1, gimple s2)
{
  unsigned int i;
  tree lhs1, lhs2;
  basic_block bb1 = gimple_bb (s1), bb2 = gimple_bb (s2);
  tree t1, t2;
  bool equal, inv_cond;
  enum tree_code code1, code2;

  if (gimple_code (s1) != gimple_code (s2))
    return false;

  switch (gimple_code (s1))
    {
    case GIMPLE_CALL:
      if (gimple_call_num_args (s1) != gimple_call_num_args (s2))
	return false;
      if (!gimple_call_same_target_p (s1, s2))
	return false;

      /* Eventually, we'll significantly complicate the CFG by adding
	 back edges to properly model the effects of transaction restart.
	 For the bulk of optimization this does not matter, but what we
	 cannot recover from is tail merging blocks between two separate
	 transactions.  Avoid that by making commit not match.  */
      if (gimple_call_builtin_p (s1, BUILT_IN_TM_COMMIT))
	return false;

      equal = true;
      for (i = 0; i < gimple_call_num_args (s1); ++i)
	{
	  t1 = gimple_call_arg (s1, i);
	  t2 = gimple_call_arg (s2, i);
	  if (operand_equal_p (t1, t2, 0))
	    continue;
	  if (gvn_uses_equal (t1, t2))
	    continue;
	  equal = false;
	  break;
	}
      if (!equal)
	return false;

      lhs1 = gimple_get_lhs (s1);
      lhs2 = gimple_get_lhs (s2);
      if (lhs1 == NULL_TREE && lhs2 == NULL_TREE)
	return true;
      if (lhs1 == NULL_TREE || lhs2 == NULL_TREE)
	return false;
      if (TREE_CODE (lhs1) == SSA_NAME && TREE_CODE (lhs2) == SSA_NAME)
	return vn_valueize (lhs1) == vn_valueize (lhs2);
      return operand_equal_p (lhs1, lhs2, 0);

    case GIMPLE_ASSIGN:
      lhs1 = gimple_get_lhs (s1);
      lhs2 = gimple_get_lhs (s2);
      if (TREE_CODE (lhs1) != SSA_NAME
	  && TREE_CODE (lhs2) != SSA_NAME)
	return (vn_valueize (gimple_vdef (s1))
		== vn_valueize (gimple_vdef (s2)));
      else if (TREE_CODE (lhs1) == SSA_NAME
	       && TREE_CODE (lhs2) == SSA_NAME)
	return vn_valueize (lhs1) == vn_valueize (lhs2);
      return false;

    case GIMPLE_COND:
      t1 = gimple_cond_lhs (s1);
      t2 = gimple_cond_lhs (s2);
      if (!operand_equal_p (t1, t2, 0)
	  && !gvn_uses_equal (t1, t2))
	return false;

      t1 = gimple_cond_rhs (s1);
      t2 = gimple_cond_rhs (s2);
      if (!operand_equal_p (t1, t2, 0)
	  && !gvn_uses_equal (t1, t2))
	return false;

      code1 = gimple_expr_code (s1);
      code2 = gimple_expr_code (s2);
      inv_cond = (bitmap_bit_p (same_succ->inverse, bb1->index)
		  != bitmap_bit_p (same_succ->inverse, bb2->index));
      if (inv_cond)
	{
	  bool honor_nans
	    = HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (s1))));
	  code2 = invert_tree_comparison (code2, honor_nans);
	}
      return code1 == code2;

    default:
      return false;
    }
}
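
/* To illustrate the inverse-condition case above (made-up statements): with
   's1: if (a_1 < b_2)' in bb1 and 's2: if (a_1 >= b_2)' in bb2, where exactly
   one of bb1/bb2 has its true/false successor edges swapped in SAME_SUCC,
   invert_tree_comparison turns GE_EXPR back into LT_EXPR (when !honor_nans),
   and the two conditions compare equal.  */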

/* Let GSI skip backwards over local defs.  Return the earliest vuse in VUSE.
   Return true in VUSE_ESCAPED if the vuse influenced a SSA_OP_DEF of one of
   the processed statements.  */

static void
gsi_advance_bw_nondebug_nonlocal (gimple_stmt_iterator *gsi, tree *vuse,
				  bool *vuse_escaped)
{
  gimple stmt;
  tree lvuse;

  while (true)
    {
      if (gsi_end_p (*gsi))
	return;
      stmt = gsi_stmt (*gsi);

      lvuse = gimple_vuse (stmt);
      if (lvuse != NULL_TREE)
	{
	  *vuse = lvuse;
	  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_DEF))
	    *vuse_escaped = true;
	}

      if (!stmt_local_def (stmt))
	return;
      gsi_prev_nondebug (gsi);
    }
}

/* Determines whether BB1 and BB2 (members of same_succ) are duplicates.  If
   so, clusters them.  */

static void
find_duplicate (same_succ same_succ, basic_block bb1, basic_block bb2)
{
  gimple_stmt_iterator gsi1 = gsi_last_nondebug_bb (bb1);
  gimple_stmt_iterator gsi2 = gsi_last_nondebug_bb (bb2);
  tree vuse1 = NULL_TREE, vuse2 = NULL_TREE;
  bool vuse_escaped = false;

  gsi_advance_bw_nondebug_nonlocal (&gsi1, &vuse1, &vuse_escaped);
  gsi_advance_bw_nondebug_nonlocal (&gsi2, &vuse2, &vuse_escaped);

  while (!gsi_end_p (gsi1) && !gsi_end_p (gsi2))
    {
      gimple stmt1 = gsi_stmt (gsi1);
      gimple stmt2 = gsi_stmt (gsi2);

      if (!gimple_equal_p (same_succ, stmt1, stmt2))
	return;

      // We cannot tail-merge the builtins that end transactions.
      // ??? The alternative being unsharing of BBs in the tm_init pass.
      if (flag_tm
	  && is_gimple_call (stmt1)
	  && (gimple_call_flags (stmt1) & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt1)))
	return;

      gsi_prev_nondebug (&gsi1);
      gsi_prev_nondebug (&gsi2);
      gsi_advance_bw_nondebug_nonlocal (&gsi1, &vuse1, &vuse_escaped);
      gsi_advance_bw_nondebug_nonlocal (&gsi2, &vuse2, &vuse_escaped);
    }

  if (!(gsi_end_p (gsi1) && gsi_end_p (gsi2)))
    return;

  /* If the incoming vuses are not the same, and the vuse escaped into an
     SSA_OP_DEF, then merging the 2 blocks will change the value of the def,
     which potentially means the semantics of one of the blocks will be
     changed.  TODO: make this check more precise.  */
  if (vuse_escaped && vuse1 != vuse2)
    return;

  if (dump_file)
    fprintf (dump_file, "find_duplicates: <bb %d> duplicate of <bb %d>\n",
	     bb1->index, bb2->index);

  set_cluster (bb1, bb2);
}

/* Returns whether for all phis in DEST the phi alternatives for E1 and
   E2 are equal.  */

static bool
same_phi_alternatives_1 (basic_block dest, edge e1, edge e2)
{
  int n1 = e1->dest_idx, n2 = e2->dest_idx;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      tree lhs = gimple_phi_result (phi);
      tree val1 = gimple_phi_arg_def (phi, n1);
      tree val2 = gimple_phi_arg_def (phi, n2);

      if (virtual_operand_p (lhs))
	continue;

      if (operand_equal_for_phi_arg_p (val1, val2))
	continue;
      if (gvn_uses_equal (val1, val2))
	continue;

      return false;
    }

  return true;
}

/* Returns whether for all successors of BB1 and BB2 (members of SAME_SUCC),
   the phi alternatives for BB1 and BB2 are equal.  */

static bool
same_phi_alternatives (same_succ same_succ, basic_block bb1, basic_block bb2)
{
  unsigned int s;
  bitmap_iterator bs;
  edge e1, e2;
  basic_block succ;

  EXECUTE_IF_SET_IN_BITMAP (same_succ->succs, 0, s, bs)
    {
      succ = BASIC_BLOCK (s);
      e1 = find_edge (bb1, succ);
      e2 = find_edge (bb2, succ);
      if (e1->flags & EDGE_COMPLEX
	  || e2->flags & EDGE_COMPLEX)
	return false;

      /* For all phis in bb, the phi alternatives for e1 and e2 need to have
	 the same value.  */
      if (!same_phi_alternatives_1 (succ, e1, e2))
	return false;
    }

  return true;
}

/* Return true if BB has non-vop phis.  */

static bool
bb_has_non_vop_phi (basic_block bb)
{
  gimple_seq phis = phi_nodes (bb);
  gimple phi;

  if (phis == NULL)
    return false;

  if (!gimple_seq_singleton_p (phis))
    return true;

  phi = gimple_seq_first_stmt (phis);
  return !virtual_operand_p (gimple_phi_result (phi));
}

/* Returns true if redirecting the incoming edges of FROM to TO maintains the
   invariant that uses in FROM are dominated by their defs.  */

static bool
deps_ok_for_redirect_from_bb_to_bb (basic_block from, basic_block to)
{
  basic_block cd, dep_bb = BB_DEP_BB (to);
  edge_iterator ei;
  edge e;
  bitmap from_preds;

  if (dep_bb == NULL)
    return true;

  from_preds = BITMAP_ALLOC (NULL);
  FOR_EACH_EDGE (e, ei, from->preds)
    bitmap_set_bit (from_preds, e->src->index);
  cd = nearest_common_dominator_for_set (CDI_DOMINATORS, from_preds);
  BITMAP_FREE (from_preds);

  return dominated_by_p (CDI_DOMINATORS, dep_bb, cd);
}

/* Returns true if replacing BB1 (or its replacement bb) by BB2 (or its
   replacement bb) and vice versa maintains the invariant that uses in the
   replacement are dominated by their defs.  */

static bool
deps_ok_for_redirect (basic_block bb1, basic_block bb2)
{
  if (BB_CLUSTER (bb1) != NULL)
    bb1 = BB_CLUSTER (bb1)->rep_bb;

  if (BB_CLUSTER (bb2) != NULL)
    bb2 = BB_CLUSTER (bb2)->rep_bb;

  return (deps_ok_for_redirect_from_bb_to_bb (bb1, bb2)
	  && deps_ok_for_redirect_from_bb_to_bb (bb2, bb1));
}

/* Within SAME_SUCC->bbs, find clusters of bbs which can be merged.  */

static void
find_clusters_1 (same_succ same_succ)
{
  basic_block bb1, bb2;
  unsigned int i, j;
  bitmap_iterator bi, bj;
  int nr_comparisons;
  int max_comparisons = PARAM_VALUE (PARAM_MAX_TAIL_MERGE_COMPARISONS);

  EXECUTE_IF_SET_IN_BITMAP (same_succ->bbs, 0, i, bi)
    {
      bb1 = BASIC_BLOCK (i);

      /* TODO: handle blocks with phi-nodes.  We'll have to find corresponding
	 phi-nodes in bb1 and bb2, with the same alternatives for the same
	 preds.  */
      if (bb_has_non_vop_phi (bb1))
	continue;

      nr_comparisons = 0;
      EXECUTE_IF_SET_IN_BITMAP (same_succ->bbs, i + 1, j, bj)
	{
	  bb2 = BASIC_BLOCK (j);

	  if (bb_has_non_vop_phi (bb2))
	    continue;

	  if (BB_CLUSTER (bb1) != NULL && BB_CLUSTER (bb1) == BB_CLUSTER (bb2))
	    continue;

	  /* Limit quadratic behaviour.  */
	  nr_comparisons++;
	  if (nr_comparisons > max_comparisons)
	    break;

	  /* This is a conservative dependency check.  We could test more
	     precisely for the allowed replacement direction.  */
	  if (!deps_ok_for_redirect (bb1, bb2))
	    continue;

	  if (!(same_phi_alternatives (same_succ, bb1, bb2)))
	    continue;

	  find_duplicate (same_succ, bb1, bb2);
	}
    }
}

/* Find clusters of bbs which can be merged.  */

static void
find_clusters (void)
{
  same_succ same;

  while (!worklist.is_empty ())
    {
      same = worklist.pop ();
      same->in_worklist = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "processing worklist entry\n");
	  same_succ_print (dump_file, same);
	}
      find_clusters_1 (same);
    }
}

/* Returns the vop phi of BB, if any.  */

static gimple
vop_phi (basic_block bb)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (!virtual_operand_p (gimple_phi_result (stmt)))
	continue;
      return stmt;
    }
  return NULL;
}

/* Redirect all edges from BB1 to BB2, remove BB1, and mark it as removed.  */

static void
replace_block_by (basic_block bb1, basic_block bb2)
{
  edge pred_edge;
  unsigned int i;
  gimple bb2_phi;

  bb2_phi = vop_phi (bb2);

  /* Mark the basic block as deleted.  */
  mark_basic_block_deleted (bb1);

  /* ??? If we merge the loop preheader with the loop latch we are creating
     additional entries into the loop, eventually rotating it.
     Mark loops for fixup in this case.
     ??? This is a completely unwanted transform and will wreck most
     loops at this point - but with just not considering loop latches as
     merge candidates we fail to commonize the two loops in gcc.dg/pr50763.c.
     A better fix to avoid that regression is needed.  */
  if (current_loops
      && bb2->loop_father->latch == bb2)
    loops_state_set (LOOPS_NEED_FIXUP);

  /* Redirect the incoming edges of bb1 to bb2.  */
  for (i = EDGE_COUNT (bb1->preds); i > 0; --i)
    {
      pred_edge = EDGE_PRED (bb1, i - 1);
      pred_edge = redirect_edge_and_branch (pred_edge, bb2);
      gcc_assert (pred_edge != NULL);

      if (bb2_phi == NULL)
	continue;

      /* The phi might have run out of capacity when the redirect added an
	 argument, which means it could have been replaced.  Refresh it.  */
      bb2_phi = vop_phi (bb2);

      add_phi_arg (bb2_phi, SSA_NAME_VAR (gimple_phi_result (bb2_phi)),
		   pred_edge, UNKNOWN_LOCATION);
    }

  bb2->frequency += bb1->frequency;
  if (bb2->frequency > BB_FREQ_MAX)
    bb2->frequency = BB_FREQ_MAX;

  bb2->count += bb1->count;

  /* Do updates that use bb1, before deleting bb1.  */
  release_last_vdef (bb1);
  same_succ_flush_bb (bb1);

  delete_basic_block (bb1);
}

/* Bbs for which update_debug_stmt needs to be called.  */

static bitmap update_bbs;

/* For each cluster in all_clusters, merge all cluster->bbs.  Returns
   number of bbs removed.  */

static int
apply_clusters (void)
{
  basic_block bb1, bb2;
  bb_cluster c;
  unsigned int i, j;
  bitmap_iterator bj;
  int nr_bbs_removed = 0;

  for (i = 0; i < all_clusters.length (); ++i)
    {
      c = all_clusters[i];
      if (c == NULL)
	continue;

      bb2 = c->rep_bb;
      bitmap_set_bit (update_bbs, bb2->index);

      bitmap_clear_bit (c->bbs, bb2->index);
      EXECUTE_IF_SET_IN_BITMAP (c->bbs, 0, j, bj)
	{
	  bb1 = BASIC_BLOCK (j);
	  bitmap_clear_bit (update_bbs, bb1->index);

	  replace_block_by (bb1, bb2);
	  nr_bbs_removed++;
	}
    }

  return nr_bbs_removed;
}

/* Resets debug statement STMT if it has uses that are not dominated by their
   defs.  */

static void
update_debug_stmt (gimple stmt)
{
  use_operand_p use_p;
  ssa_op_iter oi;
  basic_block bbdef, bbuse;
  gimple def_stmt;
  tree name;

  if (!gimple_debug_bind_p (stmt))
    return;

  bbuse = gimple_bb (stmt);
  FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, oi, SSA_OP_USE)
    {
      name = USE_FROM_PTR (use_p);
      gcc_assert (TREE_CODE (name) == SSA_NAME);

      def_stmt = SSA_NAME_DEF_STMT (name);
      gcc_assert (def_stmt != NULL);

      bbdef = gimple_bb (def_stmt);
      if (bbdef == NULL || bbuse == bbdef
	  || dominated_by_p (CDI_DOMINATORS, bbuse, bbdef))
	continue;

      gimple_debug_bind_reset_value (stmt);
      update_stmt (stmt);
    }
}

/* Resets all debug statements that have uses that are not
   dominated by their defs.  */

static void
update_debug_stmts (void)
{
  basic_block bb;
  bitmap_iterator bi;
  unsigned int i;

  EXECUTE_IF_SET_IN_BITMAP (update_bbs, 0, i, bi)
    {
      gimple stmt;
      gimple_stmt_iterator gsi;

      bb = BASIC_BLOCK (i);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (!is_gimple_debug (stmt))
	    continue;
	  update_debug_stmt (stmt);
	}
    }
}

/* Runs tail merge optimization.  */

unsigned int
tail_merge_optimize (unsigned int todo)
{
  int nr_bbs_removed_total = 0;
  int nr_bbs_removed;
  bool loop_entered = false;
  int iteration_nr = 0;
  int max_iterations = PARAM_VALUE (PARAM_MAX_TAIL_MERGE_ITERATIONS);

  if (!flag_tree_tail_merge || max_iterations == 0)
    return 0;

  timevar_push (TV_TREE_TAIL_MERGE);

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      /* PRE can leave us with unreachable blocks, remove them now.  */
      delete_unreachable_blocks ();
      calculate_dominance_info (CDI_DOMINATORS);
    }
  init_worklist ();

  while (!worklist.is_empty ())
    {
      if (!loop_entered)
	{
	  loop_entered = true;
	  alloc_cluster_vectors ();
	  update_bbs = BITMAP_ALLOC (NULL);
	}
      else
	reset_cluster_vectors ();

      iteration_nr++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "worklist iteration #%d\n", iteration_nr);

      find_clusters ();
      gcc_assert (worklist.is_empty ());
      if (all_clusters.is_empty ())
	break;

      nr_bbs_removed = apply_clusters ();
      nr_bbs_removed_total += nr_bbs_removed;
      if (nr_bbs_removed == 0)
	break;

      free_dominance_info (CDI_DOMINATORS);

      if (iteration_nr == max_iterations)
	break;

      calculate_dominance_info (CDI_DOMINATORS);
      update_worklist ();
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "htab collision / search: %f\n",
	     same_succ_htab.collisions ());

  if (nr_bbs_removed_total > 0)
    {
      if (MAY_HAVE_DEBUG_STMTS)
	{
	  calculate_dominance_info (CDI_DOMINATORS);
	  update_debug_stmts ();
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Before TODOs.\n");
	  dump_function_to_file (current_function_decl, dump_file, dump_flags);
	}

      todo |= (TODO_verify_ssa | TODO_verify_stmts | TODO_verify_flow);
      mark_virtual_operands_for_renaming (cfun);
    }

  delete_worklist ();
  if (loop_entered)
    {
      delete_cluster_vectors ();
      BITMAP_FREE (update_bbs);
    }

  timevar_pop (TV_TREE_TAIL_MERGE);

  return todo;
}