/* Tail merging for gimple.
   Copyright (C) 2011-2013 Free Software Foundation, Inc.
   Contributed by Tom de Vries (tom@codesourcery.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Pass overview.


   MOTIVATIONAL EXAMPLE

   gimple representation of gcc/testsuite/gcc.dg/pr43864.c:

   hprofStartupp (charD.1 * outputFileNameD.2600, charD.1 * ctxD.2601)
   {
     struct FILED.1638 * fpD.2605;
     charD.1 fileNameD.2604[1000];
     intD.0 D.3915;
     const charD.1 * restrict outputFileName.0D.3914;

     # BLOCK 2 freq:10000
     # PRED: ENTRY [100.0%]  (fallthru,exec)
     # PT = nonlocal { D.3926 } (restr)
     outputFileName.0D.3914_3
       = (const charD.1 * restrict) outputFileNameD.2600_2(D);
     # .MEMD.3923_13 = VDEF <.MEMD.3923_12(D)>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     sprintfD.759 (&fileNameD.2604, outputFileName.0D.3914_3);
     # .MEMD.3923_14 = VDEF <.MEMD.3923_13>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     D.3915_4 = accessD.2606 (&fileNameD.2604, 1);
     if (D.3915_4 == 0)
       goto <bb 3>;
     else
       goto <bb 4>;
     # SUCC: 3 [10.0%]  (true,exec) 4 [90.0%]  (false,exec)

     # BLOCK 3 freq:1000
     # PRED: 2 [10.0%]  (true,exec)
     # .MEMD.3923_15 = VDEF <.MEMD.3923_14>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     freeD.898 (ctxD.2601_5(D));
     goto <bb 7>;
     # SUCC: 7 [100.0%]  (fallthru,exec)

     # BLOCK 4 freq:9000
     # PRED: 2 [90.0%]  (false,exec)
     # .MEMD.3923_16 = VDEF <.MEMD.3923_14>
     # PT = nonlocal escaped
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     fpD.2605_8 = fopenD.1805 (&fileNameD.2604[0], 0B);
     if (fpD.2605_8 == 0B)
       goto <bb 5>;
     else
       goto <bb 6>;
     # SUCC: 5 [1.9%]  (true,exec) 6 [98.1%]  (false,exec)

     # BLOCK 5 freq:173
     # PRED: 4 [1.9%]  (true,exec)
     # .MEMD.3923_17 = VDEF <.MEMD.3923_16>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     freeD.898 (ctxD.2601_5(D));
     goto <bb 7>;
     # SUCC: 7 [100.0%]  (fallthru,exec)

     # BLOCK 6 freq:8827
     # PRED: 4 [98.1%]  (false,exec)
     # .MEMD.3923_18 = VDEF <.MEMD.3923_16>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     fooD.2599 (outputFileNameD.2600_2(D), fpD.2605_8);
     # SUCC: 7 [100.0%]  (fallthru,exec)

     # BLOCK 7 freq:10000
     # PRED: 3 [100.0%]  (fallthru,exec) 5 [100.0%]  (fallthru,exec)
             6 [100.0%]  (fallthru,exec)
     # PT = nonlocal null

     # ctxD.2601_1 = PHI <0B(3), 0B(5), ctxD.2601_5(D)(6)>
     # .MEMD.3923_11 = PHI <.MEMD.3923_15(3), .MEMD.3923_17(5),
                            .MEMD.3923_18(6)>
     # VUSE <.MEMD.3923_11>
     return ctxD.2601_1;
     # SUCC: EXIT [100.0%]
   }

   bb 3 and bb 5 can be merged.  The blocks have different predecessors, but
   the same successors, and the same operations.
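
   Approximate C source for the example (a sketch reconstructed from the dump
   above, not the verbatim testcase; the testcase's declarations of access,
   fopen and foo differ from the standard ones):

     char *
     hprofStartupp (char *outputFileName, char *ctx)
     {
       char fileName[1000];
       FILE *fp;

       sprintf (fileName, outputFileName);
       if (access (fileName, 1) == 0)
         {
           free (ctx);
           return 0;
         }
       fp = fopen (fileName, 0);
       if (fp == 0)
         {
           free (ctx);
           return 0;
         }
       foo (outputFileName, fp);
       return ctx;
     }

   The two 'free (ctx); return 0;' arms become bb 3 and bb 5 above.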

   CONTEXT

   A technique called tail merging (or cross jumping) can fix the example
   above.  For a block, we look for common code at the end (the tail) of the
   predecessor blocks, and insert jumps from one block to the other.
   The example is a special case for tail merging, in that 2 whole blocks
   can be merged, rather than just the end parts of them.
   We currently only focus on whole block merging, so in that sense
   calling this pass tail merge is a bit of a misnomer.

   We distinguish 2 kinds of situations in which blocks can be merged:
   - same operations, same predecessors.  The successor edges coming from one
     block are redirected to come from the other block.
   - same operations, same successors.  The predecessor edges entering one
     block are redirected to enter the other block.  Note that this operation
     might involve introducing phi operations.
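
   For instance (a sketch, not from the testsuite): in the diamond

     <bb 2>: if (c_1 != 0) goto <bb 3>; else goto <bb 4>;
     <bb 3>: x_2 = y_1 + 1; goto <bb 5>;
     <bb 4>: x_3 = y_1 + 1; goto <bb 5>;

   bb 3 and bb 4 have both the same predecessor and the same successor, so
   either description applies: redirecting edge 2->4 to enter bb 3 leaves a
   single copy of the computation.  As noted under LIMITATIONS below, the
   pass implements the same-predecessors situation as a special subcase of
   the same-successors one.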

   For efficient implementation, we would like to value number the blocks,
   and have a comparison operator that tells us whether the blocks are equal.
   Besides being runtime efficient, block value numbering should also abstract
   from irrelevant differences in the order of operations, much like normal
   value numbering abstracts from irrelevant order of operations.

   For the first situation (same operations, same predecessors), normal value
   numbering fits well.  We can calculate a block value number based on the
   value numbers of the defs and vdefs.

   For the second situation (same operations, same successors), this approach
   doesn't work so well.  We can illustrate this using the example.  The calls
   to free use different vdefs: MEMD.3923_16 and MEMD.3923_14, and these will
   remain different in value numbering, since they represent different memory
   states.  So the resulting vdefs of the frees will be different in value
   numbering, so the block value numbers will be different.

   The reason why we call the blocks equal is not because they define the same
   values, but because uses in the blocks use (possibly different) defs in the
   same way.  To be able to detect this efficiently, we need to do some kind of
   reverse value numbering, meaning number the uses rather than the defs, and
   calculate a block value number based on the value number of the uses.
   Ideally, a block comparison operator will also indicate which phis are
   needed to merge the blocks.

   For the moment, we don't do block value numbering, but we do insn-by-insn
   matching, using SCC value numbers to match operations with results, and
   structural comparison otherwise, while ignoring vop mismatches.
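
   In the example, the insn-by-insn match accepts the two calls

     # .MEMD.3923_15 = VDEF <.MEMD.3923_14>
     freeD.898 (ctxD.2601_5(D));

   and

     # .MEMD.3923_17 = VDEF <.MEMD.3923_16>
     freeD.898 (ctxD.2601_5(D));

   as equal: the call targets and arguments match, and the vop mismatch
   (.MEMD.3923_14 vs. .MEMD.3923_16) is ignored.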

   IMPLEMENTATION

   1. The pass first determines all groups of blocks with the same successor
      blocks.
   2. Within each group, it tries to determine clusters of equal basic blocks.
   3. The clusters are applied.
   4. The same successor groups are updated.
   5. This process is repeated from 2 onwards, until no more changes.
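
   A rough sketch of the driver implementing these steps (the actual entry
   point is tail_merge_optimize at the end of this file, which additionally
   maintains dominance info and honours the iteration limit):

     init_worklist ();                     (step 1)
     while (!worklist.is_empty ())
       {
         find_clusters ();                 (step 2)
         if (apply_clusters () == 0)       (step 3)
           break;
         update_worklist ();               (steps 4 and 5)
       }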

   LIMITATIONS/TODO

   - block only
   - handles only 'same operations, same successors'.
     It handles same predecessors as a special subcase though.
   - does not implement the reverse value numbering and block value numbering.
   - improve memory allocation: use garbage collected memory, obstacks,
     allocpools where appropriate.
   - no insertion of gimple_reg phis.  We only introduce vop phis.
   - handle blocks with gimple_reg phi_nodes.

   PASS PLACEMENT
   This 'pass' is not a stand-alone gimple pass, but runs as part of
   pass_pre, in order to share the value numbering.


   SWITCHES

   - ftree-tail-merge.  On at -O2.  We may have to enable it only at -Os.  */
188 #include "config.h"
189 #include "system.h"
190 #include "coretypes.h"
191 #include "tm.h"
192 #include "tree.h"
193 #include "tm_p.h"
194 #include "basic-block.h"
195 #include "flags.h"
196 #include "function.h"
197 #include "gimple.h"
198 #include "gimple-ssa.h"
199 #include "tree-cfg.h"
200 #include "tree-phinodes.h"
201 #include "ssa-iterators.h"
202 #include "tree-into-ssa.h"
203 #include "tree-ssa-alias.h"
204 #include "params.h"
205 #include "hash-table.h"
206 #include "gimple-pretty-print.h"
207 #include "tree-ssa-sccvn.h"
208 #include "tree-dump.h"
209 #include "cfgloop.h"
210 #include "tree-pass.h"

/* Describes a group of bbs with the same successors.  The successor bbs are
   cached in succs, and the successor edge flags are cached in succ_flags.
   If a bb has the EDGE_TRUE/FALSE_VALUE flags swapped compared to succ_flags,
   it's marked in inverse.
   Additionally, the hash value for the struct is cached in hashval, and
   in_worklist indicates whether it's currently part of worklist.  */

struct same_succ_def
{
  /* The bbs that have the same successor bbs.  */
  bitmap bbs;
  /* The successor bbs.  */
  bitmap succs;
  /* Indicates whether the EDGE_TRUE/FALSE_VALUEs of succ_flags are swapped
     for bb.  */
  bitmap inverse;
  /* The edge flags for each of the successor bbs.  */
  vec<int> succ_flags;
  /* Indicates whether the struct is currently in the worklist.  */
  bool in_worklist;
  /* The hash value of the struct.  */
  hashval_t hashval;

  /* hash_table support.  */
  typedef same_succ_def value_type;
  typedef same_succ_def compare_type;
  static inline hashval_t hash (const value_type *);
  static int equal (const value_type *, const compare_type *);
  static void remove (value_type *);
};
typedef struct same_succ_def *same_succ;
typedef const struct same_succ_def *const_same_succ;

/* hash routine for hash_table support, returns hashval of E.  */

inline hashval_t
same_succ_def::hash (const value_type *e)
{
  return e->hashval;
}

/* A group of bbs where 1 bb from bbs can replace the other bbs.  */

struct bb_cluster_def
{
  /* The bbs in the cluster.  */
  bitmap bbs;
  /* The preds of the bbs in the cluster.  */
  bitmap preds;
  /* Index in all_clusters vector.  */
  int index;
  /* The bb to replace the cluster with.  */
  basic_block rep_bb;
};
typedef struct bb_cluster_def *bb_cluster;
typedef const struct bb_cluster_def *const_bb_cluster;

/* Per bb-info.  */

struct aux_bb_info
{
  /* The number of non-debug statements in the bb.  */
  int size;
  /* The same_succ that this bb is a member of.  */
  same_succ bb_same_succ;
  /* The cluster that this bb is a member of.  */
  bb_cluster cluster;
  /* The vop state at the exit of a bb.  This is shortlived data, used to
     communicate data between update_block_by and update_vuses.  */
  tree vop_at_exit;
  /* The bb that either contains or is dominated by the dependencies of the
     bb.  */
  basic_block dep_bb;
};

/* Macros to access the fields of struct aux_bb_info.  */

#define BB_SIZE(bb) (((struct aux_bb_info *)bb->aux)->size)
#define BB_SAME_SUCC(bb) (((struct aux_bb_info *)bb->aux)->bb_same_succ)
#define BB_CLUSTER(bb) (((struct aux_bb_info *)bb->aux)->cluster)
#define BB_VOP_AT_EXIT(bb) (((struct aux_bb_info *)bb->aux)->vop_at_exit)
#define BB_DEP_BB(bb) (((struct aux_bb_info *)bb->aux)->dep_bb)

/* Returns true if the only effect a statement STMT has, is to define locally
   used SSA_NAMEs.  */

static bool
stmt_local_def (gimple stmt)
{
  basic_block bb, def_bb;
  imm_use_iterator iter;
  use_operand_p use_p;
  tree val;
  def_operand_p def_p;

  if (gimple_has_side_effects (stmt)
      || gimple_vdef (stmt) != NULL_TREE)
    return false;

  def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
  if (def_p == NULL)
    return false;

  val = DEF_FROM_PTR (def_p);
  if (val == NULL_TREE || TREE_CODE (val) != SSA_NAME)
    return false;

  def_bb = gimple_bb (stmt);

  FOR_EACH_IMM_USE_FAST (use_p, iter, val)
    {
      if (is_gimple_debug (USE_STMT (use_p)))
        continue;
      bb = gimple_bb (USE_STMT (use_p));
      if (bb == def_bb)
        continue;

      if (gimple_code (USE_STMT (use_p)) == GIMPLE_PHI
          && EDGE_PRED (bb, PHI_ARG_INDEX_FROM_USE (use_p))->src == def_bb)
        continue;

      return false;
    }

  return true;
}
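
/* For instance (a sketch): in

     <bb 2>: x_1 = y_2 + 1;
             if (x_1 != 0) goto <bb 3>; else goto <bb 4>;

   the def x_1 is used only by the GIMPLE_COND in the same bb, so
   stmt_local_def returns true for the assignment, and the block comparison
   code below can skip over it.  */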

/* Let GSI skip forwards over local defs.  */

static void
gsi_advance_fw_nondebug_nonlocal (gimple_stmt_iterator *gsi)
{
  gimple stmt;

  while (true)
    {
      if (gsi_end_p (*gsi))
        return;
      stmt = gsi_stmt (*gsi);
      if (!stmt_local_def (stmt))
        return;
      gsi_next_nondebug (gsi);
    }
}

/* VAL1 and VAL2 are either:
   - uses in BB1 and BB2, or
   - phi alternatives for BB1 and BB2.
   Return true if the uses have the same gvn value.  */

static bool
gvn_uses_equal (tree val1, tree val2)
{
  gcc_checking_assert (val1 != NULL_TREE && val2 != NULL_TREE);

  if (val1 == val2)
    return true;

  if (vn_valueize (val1) != vn_valueize (val2))
    return false;

  return ((TREE_CODE (val1) == SSA_NAME || CONSTANT_CLASS_P (val1))
          && (TREE_CODE (val2) == SSA_NAME || CONSTANT_CLASS_P (val2)));
}

/* Prints E to FILE.  */

static void
same_succ_print (FILE *file, const same_succ e)
{
  unsigned int i;
  bitmap_print (file, e->bbs, "bbs:", "\n");
  bitmap_print (file, e->succs, "succs:", "\n");
  bitmap_print (file, e->inverse, "inverse:", "\n");
  fprintf (file, "flags:");
  for (i = 0; i < e->succ_flags.length (); ++i)
    fprintf (file, " %x", e->succ_flags[i]);
  fprintf (file, "\n");
}

/* Prints same_succ *PE to FILE.  */

inline int
ssa_same_succ_print_traverse (same_succ *pe, FILE *file)
{
  const same_succ e = *pe;
  same_succ_print (file, e);
  return 1;
}

/* Update BB_DEP_BB (USE_BB), given a use of VAL in USE_BB.  */

static void
update_dep_bb (basic_block use_bb, tree val)
{
  basic_block dep_bb;

  /* Not a dep.  */
  if (TREE_CODE (val) != SSA_NAME)
    return;

  /* Skip use of global def.  */
  if (SSA_NAME_IS_DEFAULT_DEF (val))
    return;

  /* Skip use of local def.  */
  dep_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
  if (dep_bb == use_bb)
    return;

  if (BB_DEP_BB (use_bb) == NULL
      || dominated_by_p (CDI_DOMINATORS, dep_bb, BB_DEP_BB (use_bb)))
    BB_DEP_BB (use_bb) = dep_bb;
}

/* Update BB_DEP_BB, given the dependencies in STMT.  */

static void
stmt_update_dep_bb (gimple stmt)
{
  ssa_op_iter iter;
  use_operand_p use;

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    update_dep_bb (gimple_bb (stmt), USE_FROM_PTR (use));
}

/* Calculates hash value for same_succ E.  */

static hashval_t
same_succ_hash (const_same_succ e)
{
  hashval_t hashval = bitmap_hash (e->succs);
  int flags;
  unsigned int i;
  unsigned int first = bitmap_first_set_bit (e->bbs);
  basic_block bb = BASIC_BLOCK (first);
  int size = 0;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree arg;
  unsigned int s;
  bitmap_iterator bs;

  for (gsi = gsi_start_nondebug_bb (bb);
       !gsi_end_p (gsi); gsi_next_nondebug (&gsi))
    {
      stmt = gsi_stmt (gsi);
      stmt_update_dep_bb (stmt);
      if (stmt_local_def (stmt))
        continue;
      size++;

      hashval = iterative_hash_hashval_t (gimple_code (stmt), hashval);
      if (is_gimple_assign (stmt))
        hashval = iterative_hash_hashval_t (gimple_assign_rhs_code (stmt),
                                            hashval);
      if (!is_gimple_call (stmt))
        continue;
      if (gimple_call_internal_p (stmt))
        hashval = iterative_hash_hashval_t
          ((hashval_t) gimple_call_internal_fn (stmt), hashval);
      else
        hashval = iterative_hash_expr (gimple_call_fn (stmt), hashval);
      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          arg = gimple_call_arg (stmt, i);
          arg = vn_valueize (arg);
          hashval = iterative_hash_expr (arg, hashval);
        }
    }

  hashval = iterative_hash_hashval_t (size, hashval);
  BB_SIZE (bb) = size;

  for (i = 0; i < e->succ_flags.length (); ++i)
    {
      flags = e->succ_flags[i];
      flags = flags & ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
      hashval = iterative_hash_hashval_t (flags, hashval);
    }

  EXECUTE_IF_SET_IN_BITMAP (e->succs, 0, s, bs)
    {
      int n = find_edge (bb, BASIC_BLOCK (s))->dest_idx;
      for (gsi = gsi_start_phis (BASIC_BLOCK (s)); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);
          tree lhs = gimple_phi_result (phi);
          tree val = gimple_phi_arg_def (phi, n);

          if (virtual_operand_p (lhs))
            continue;
          update_dep_bb (bb, val);
        }
    }

  return hashval;
}

/* Returns true if E1 and E2 have 2 successors, and if the successor flags
   are inverse for the EDGE_TRUE_VALUE and EDGE_FALSE_VALUE flags, and equal
   for the other edge flags.  */

static bool
inverse_flags (const_same_succ e1, const_same_succ e2)
{
  int f1a, f1b, f2a, f2b;
  int mask = ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

  if (e1->succ_flags.length () != 2)
    return false;

  f1a = e1->succ_flags[0];
  f1b = e1->succ_flags[1];
  f2a = e2->succ_flags[0];
  f2b = e2->succ_flags[1];

  if (f1a == f2a && f1b == f2b)
    return false;

  return (f1a & mask) == (f2a & mask) && (f1b & mask) == (f2b & mask);
}
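
/* For instance (a sketch): a bb ending in

     if (x_1 == 0) goto <bb 3>; else goto <bb 4>;

   and a bb ending in

     if (y_2 != 0) goto <bb 4>; else goto <bb 3>;

   have the same successors with the EDGE_TRUE_VALUE and EDGE_FALSE_VALUE
   flags swapped.  inverse_flags detects this; gimple_equal_p below then
   requires the two condition codes to be each other's inversion.  */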

/* Compares SAME_SUCCs E1 and E2.  */

int
same_succ_def::equal (const value_type *e1, const compare_type *e2)
{
  unsigned int i, first1, first2;
  gimple_stmt_iterator gsi1, gsi2;
  gimple s1, s2;
  basic_block bb1, bb2;

  if (e1->hashval != e2->hashval)
    return 0;

  if (e1->succ_flags.length () != e2->succ_flags.length ())
    return 0;

  if (!bitmap_equal_p (e1->succs, e2->succs))
    return 0;

  if (!inverse_flags (e1, e2))
    {
      for (i = 0; i < e1->succ_flags.length (); ++i)
        if (e1->succ_flags[i] != e2->succ_flags[i])
          return 0;
    }

  first1 = bitmap_first_set_bit (e1->bbs);
  first2 = bitmap_first_set_bit (e2->bbs);

  bb1 = BASIC_BLOCK (first1);
  bb2 = BASIC_BLOCK (first2);

  if (BB_SIZE (bb1) != BB_SIZE (bb2))
    return 0;

  gsi1 = gsi_start_nondebug_bb (bb1);
  gsi2 = gsi_start_nondebug_bb (bb2);
  gsi_advance_fw_nondebug_nonlocal (&gsi1);
  gsi_advance_fw_nondebug_nonlocal (&gsi2);
  while (!(gsi_end_p (gsi1) || gsi_end_p (gsi2)))
    {
      s1 = gsi_stmt (gsi1);
      s2 = gsi_stmt (gsi2);
      if (gimple_code (s1) != gimple_code (s2))
        return 0;
      if (is_gimple_call (s1) && !gimple_call_same_target_p (s1, s2))
        return 0;
      gsi_next_nondebug (&gsi1);
      gsi_next_nondebug (&gsi2);
      gsi_advance_fw_nondebug_nonlocal (&gsi1);
      gsi_advance_fw_nondebug_nonlocal (&gsi2);
    }

  return 1;
}

/* Alloc and init a new SAME_SUCC.  */

static same_succ
same_succ_alloc (void)
{
  same_succ same = XNEW (struct same_succ_def);

  same->bbs = BITMAP_ALLOC (NULL);
  same->succs = BITMAP_ALLOC (NULL);
  same->inverse = BITMAP_ALLOC (NULL);
  same->succ_flags.create (10);
  same->in_worklist = false;

  return same;
}

/* Delete same_succ E.  */

void
same_succ_def::remove (same_succ e)
{
  BITMAP_FREE (e->bbs);
  BITMAP_FREE (e->succs);
  BITMAP_FREE (e->inverse);
  e->succ_flags.release ();

  XDELETE (e);
}

/* Reset same_succ SAME.  */

static void
same_succ_reset (same_succ same)
{
  bitmap_clear (same->bbs);
  bitmap_clear (same->succs);
  bitmap_clear (same->inverse);
  same->succ_flags.truncate (0);
}

static hash_table <same_succ_def> same_succ_htab;

/* Array that is used to store the edge flags for a successor.  */

static int *same_succ_edge_flags;

/* Bitmap that is used to mark bbs that are recently deleted.  */

static bitmap deleted_bbs;

/* Bitmap that is used to mark predecessors of bbs that are
   deleted.  */

static bitmap deleted_bb_preds;

/* Prints same_succ_htab to stderr.  */

extern void debug_same_succ (void);
DEBUG_FUNCTION void
debug_same_succ (void)
{
  same_succ_htab.traverse <FILE *, ssa_same_succ_print_traverse> (stderr);
}

/* Vector of bbs to process.  */

static vec<same_succ> worklist;

/* Prints worklist to FILE.  */

static void
print_worklist (FILE *file)
{
  unsigned int i;
  for (i = 0; i < worklist.length (); ++i)
    same_succ_print (file, worklist[i]);
}

/* Adds SAME to worklist.  */

static void
add_to_worklist (same_succ same)
{
  if (same->in_worklist)
    return;

  if (bitmap_count_bits (same->bbs) < 2)
    return;

  same->in_worklist = true;
  worklist.safe_push (same);
}

/* Add BB to same_succ_htab.  */

static void
find_same_succ_bb (basic_block bb, same_succ *same_p)
{
  unsigned int j;
  bitmap_iterator bj;
  same_succ same = *same_p;
  same_succ *slot;
  edge_iterator ei;
  edge e;

  if (bb == NULL
      /* Be conservative with loop structure.  It's not evident that this test
         is sufficient.  Before tail-merge, we've just called
         loop_optimizer_finalize, and LOOPS_MAY_HAVE_MULTIPLE_LATCHES is now
         set, so there's no guarantee that the loop->latch value is still valid.
         But we assume that, since we've forced LOOPS_HAVE_SIMPLE_LATCHES at the
         start of pre, we've kept that property intact throughout pre, and are
         keeping it throughout tail-merge using this test.  */
      || bb->loop_father->latch == bb)
    return;
  bitmap_set_bit (same->bbs, bb->index);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int index = e->dest->index;
      bitmap_set_bit (same->succs, index);
      same_succ_edge_flags[index] = e->flags;
    }
  EXECUTE_IF_SET_IN_BITMAP (same->succs, 0, j, bj)
    same->succ_flags.safe_push (same_succ_edge_flags[j]);

  same->hashval = same_succ_hash (same);

  slot = same_succ_htab.find_slot_with_hash (same, same->hashval, INSERT);
  if (*slot == NULL)
    {
      *slot = same;
      BB_SAME_SUCC (bb) = same;
      add_to_worklist (same);
      *same_p = NULL;
    }
  else
    {
      bitmap_set_bit ((*slot)->bbs, bb->index);
      BB_SAME_SUCC (bb) = *slot;
      add_to_worklist (*slot);
      if (inverse_flags (same, *slot))
        bitmap_set_bit ((*slot)->inverse, bb->index);
      same_succ_reset (same);
    }
}

/* Find bbs with same successors.  */

static void
find_same_succ (void)
{
  same_succ same = same_succ_alloc ();
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      find_same_succ_bb (bb, &same);
      if (same == NULL)
        same = same_succ_alloc ();
    }

  same_succ_def::remove (same);
}

/* Initializes worklist administration.  */

static void
init_worklist (void)
{
  alloc_aux_for_blocks (sizeof (struct aux_bb_info));
  same_succ_htab.create (n_basic_blocks);
  same_succ_edge_flags = XCNEWVEC (int, last_basic_block);
  deleted_bbs = BITMAP_ALLOC (NULL);
  deleted_bb_preds = BITMAP_ALLOC (NULL);
  worklist.create (n_basic_blocks);
  find_same_succ ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "initial worklist:\n");
      print_worklist (dump_file);
    }
}

/* Deletes worklist administration.  */

static void
delete_worklist (void)
{
  free_aux_for_blocks ();
  same_succ_htab.dispose ();
  XDELETEVEC (same_succ_edge_flags);
  same_succ_edge_flags = NULL;
  BITMAP_FREE (deleted_bbs);
  BITMAP_FREE (deleted_bb_preds);
  worklist.release ();
}

/* Mark BB as deleted, and mark its predecessors.  */

static void
mark_basic_block_deleted (basic_block bb)
{
  edge e;
  edge_iterator ei;

  bitmap_set_bit (deleted_bbs, bb->index);

  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_set_bit (deleted_bb_preds, e->src->index);
}

/* Removes BB from its corresponding same_succ.  */

static void
same_succ_flush_bb (basic_block bb)
{
  same_succ same = BB_SAME_SUCC (bb);
  BB_SAME_SUCC (bb) = NULL;
  if (bitmap_single_bit_set_p (same->bbs))
    same_succ_htab.remove_elt_with_hash (same, same->hashval);
  else
    bitmap_clear_bit (same->bbs, bb->index);
}

/* Removes all bbs in BBS from their corresponding same_succ.  */

static void
same_succ_flush_bbs (bitmap bbs)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (bbs, 0, i, bi)
    same_succ_flush_bb (BASIC_BLOCK (i));
}

/* Release the last vdef in BB, either normal or phi result.  */

static void
release_last_vdef (basic_block bb)
{
  gimple_stmt_iterator i;

  for (i = gsi_last_bb (bb); !gsi_end_p (i); gsi_prev_nondebug (&i))
    {
      gimple stmt = gsi_stmt (i);
      if (gimple_vdef (stmt) == NULL_TREE)
        continue;

      mark_virtual_operand_for_renaming (gimple_vdef (stmt));
      return;
    }

  for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
    {
      gimple phi = gsi_stmt (i);
      tree res = gimple_phi_result (phi);

      if (!virtual_operand_p (res))
        continue;

      mark_virtual_phi_result_for_renaming (phi);
      return;
    }
}

/* For deleted_bb_preds, find bbs with same successors.  */

static void
update_worklist (void)
{
  unsigned int i;
  bitmap_iterator bi;
  basic_block bb;
  same_succ same;

  bitmap_and_compl_into (deleted_bb_preds, deleted_bbs);
  bitmap_clear (deleted_bbs);

  bitmap_clear_bit (deleted_bb_preds, ENTRY_BLOCK);
  same_succ_flush_bbs (deleted_bb_preds);

  same = same_succ_alloc ();
  EXECUTE_IF_SET_IN_BITMAP (deleted_bb_preds, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      gcc_assert (bb != NULL);
      find_same_succ_bb (bb, &same);
      if (same == NULL)
        same = same_succ_alloc ();
    }
  same_succ_def::remove (same);
  bitmap_clear (deleted_bb_preds);
}

/* Prints cluster C to FILE.  */

static void
print_cluster (FILE *file, bb_cluster c)
{
  if (c == NULL)
    return;
  bitmap_print (file, c->bbs, "bbs:", "\n");
  bitmap_print (file, c->preds, "preds:", "\n");
}

/* Prints cluster C to stderr.  */

extern void debug_cluster (bb_cluster);
DEBUG_FUNCTION void
debug_cluster (bb_cluster c)
{
  print_cluster (stderr, c);
}

/* Update C->rep_bb, given that BB is added to the cluster.  */

static void
update_rep_bb (bb_cluster c, basic_block bb)
{
  /* Initial.  */
  if (c->rep_bb == NULL)
    {
      c->rep_bb = bb;
      return;
    }

  /* Current needs no deps, keep it.  */
  if (BB_DEP_BB (c->rep_bb) == NULL)
    return;

  /* Bb needs no deps, change rep_bb.  */
  if (BB_DEP_BB (bb) == NULL)
    {
      c->rep_bb = bb;
      return;
    }

  /* Bb needs last deps earlier than current, change rep_bb.  A potential
     problem with this, is that the first deps might also be earlier, which
     would mean we prefer longer lifetimes for the deps.  To be able to check
     for this, we would have to trace BB_FIRST_DEP_BB as well, besides
     BB_DEP_BB, which is really BB_LAST_DEP_BB.
     The benefit of choosing the bb with last deps earlier, is that it can
     potentially be used as replacement for more bbs.  */
  if (dominated_by_p (CDI_DOMINATORS, BB_DEP_BB (c->rep_bb), BB_DEP_BB (bb)))
    c->rep_bb = bb;
}

/* Add BB to cluster C.  Sets BB in C->bbs, and preds of BB in C->preds.  */

static void
add_bb_to_cluster (bb_cluster c, basic_block bb)
{
  edge e;
  edge_iterator ei;

  bitmap_set_bit (c->bbs, bb->index);

  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_set_bit (c->preds, e->src->index);

  update_rep_bb (c, bb);
}

/* Allocate and init new cluster.  */

static bb_cluster
new_cluster (void)
{
  bb_cluster c;
  c = XCNEW (struct bb_cluster_def);
  c->bbs = BITMAP_ALLOC (NULL);
  c->preds = BITMAP_ALLOC (NULL);
  c->rep_bb = NULL;
  return c;
}

/* Delete cluster C.  */

static void
delete_cluster (bb_cluster c)
{
  if (c == NULL)
    return;
  BITMAP_FREE (c->bbs);
  BITMAP_FREE (c->preds);
  XDELETE (c);
}

/* Array that contains all clusters.  */

static vec<bb_cluster> all_clusters;

/* Allocate all cluster vectors.  */

static void
alloc_cluster_vectors (void)
{
  all_clusters.create (n_basic_blocks);
}

/* Reset all cluster vectors.  */

static void
reset_cluster_vectors (void)
{
  unsigned int i;
  basic_block bb;
  for (i = 0; i < all_clusters.length (); ++i)
    delete_cluster (all_clusters[i]);
  all_clusters.truncate (0);
  FOR_EACH_BB (bb)
    BB_CLUSTER (bb) = NULL;
}

/* Delete all cluster vectors.  */

static void
delete_cluster_vectors (void)
{
  unsigned int i;
  for (i = 0; i < all_clusters.length (); ++i)
    delete_cluster (all_clusters[i]);
  all_clusters.release ();
}

/* Merge cluster C2 into C1.  */

static void
merge_clusters (bb_cluster c1, bb_cluster c2)
{
  bitmap_ior_into (c1->bbs, c2->bbs);
  bitmap_ior_into (c1->preds, c2->preds);
}

/* Register equivalence of BB1 and BB2 (members of cluster C).  Store c in
   all_clusters, or merge c with existing cluster.  */

static void
set_cluster (basic_block bb1, basic_block bb2)
{
  basic_block merge_bb, other_bb;
  bb_cluster merge, old, c;

  if (BB_CLUSTER (bb1) == NULL && BB_CLUSTER (bb2) == NULL)
    {
      c = new_cluster ();
      add_bb_to_cluster (c, bb1);
      add_bb_to_cluster (c, bb2);
      BB_CLUSTER (bb1) = c;
      BB_CLUSTER (bb2) = c;
      c->index = all_clusters.length ();
      all_clusters.safe_push (c);
    }
  else if (BB_CLUSTER (bb1) == NULL || BB_CLUSTER (bb2) == NULL)
    {
      merge_bb = BB_CLUSTER (bb1) == NULL ? bb2 : bb1;
      other_bb = BB_CLUSTER (bb1) == NULL ? bb1 : bb2;
      merge = BB_CLUSTER (merge_bb);
      add_bb_to_cluster (merge, other_bb);
      BB_CLUSTER (other_bb) = merge;
    }
  else if (BB_CLUSTER (bb1) != BB_CLUSTER (bb2))
    {
      unsigned int i;
      bitmap_iterator bi;

      old = BB_CLUSTER (bb2);
      merge = BB_CLUSTER (bb1);
      merge_clusters (merge, old);
      EXECUTE_IF_SET_IN_BITMAP (old->bbs, 0, i, bi)
        BB_CLUSTER (BASIC_BLOCK (i)) = merge;
      all_clusters[old->index] = NULL;
      update_rep_bb (merge, old->rep_bb);
      delete_cluster (old);
    }
  else
    gcc_unreachable ();
}

/* Return true if gimple statements S1 and S2 are equal.  Gimple_bb (s1) and
   gimple_bb (s2) are members of SAME_SUCC.  */

static bool
gimple_equal_p (same_succ same_succ, gimple s1, gimple s2)
{
  unsigned int i;
  tree lhs1, lhs2;
  basic_block bb1 = gimple_bb (s1), bb2 = gimple_bb (s2);
  tree t1, t2;
  bool equal, inv_cond;
  enum tree_code code1, code2;

  if (gimple_code (s1) != gimple_code (s2))
    return false;

  switch (gimple_code (s1))
    {
    case GIMPLE_CALL:
      if (gimple_call_num_args (s1) != gimple_call_num_args (s2))
        return false;
      if (!gimple_call_same_target_p (s1, s2))
        return false;

      /* Eventually, we'll significantly complicate the CFG by adding
         back edges to properly model the effects of transaction restart.
         For the bulk of optimization this does not matter, but what we
         cannot recover from is tail merging blocks between two separate
         transactions.  Avoid that by making commit not match.  */
      if (gimple_call_builtin_p (s1, BUILT_IN_TM_COMMIT))
        return false;

      equal = true;
      for (i = 0; i < gimple_call_num_args (s1); ++i)
        {
          t1 = gimple_call_arg (s1, i);
          t2 = gimple_call_arg (s2, i);
          if (operand_equal_p (t1, t2, 0))
            continue;
          if (gvn_uses_equal (t1, t2))
            continue;
          equal = false;
          break;
        }
      if (!equal)
        return false;

      lhs1 = gimple_get_lhs (s1);
      lhs2 = gimple_get_lhs (s2);
      if (lhs1 == NULL_TREE && lhs2 == NULL_TREE)
        return true;
      if (lhs1 == NULL_TREE || lhs2 == NULL_TREE)
        return false;
      if (TREE_CODE (lhs1) == SSA_NAME && TREE_CODE (lhs2) == SSA_NAME)
        return vn_valueize (lhs1) == vn_valueize (lhs2);
      return operand_equal_p (lhs1, lhs2, 0);

    case GIMPLE_ASSIGN:
      lhs1 = gimple_get_lhs (s1);
      lhs2 = gimple_get_lhs (s2);
      if (TREE_CODE (lhs1) != SSA_NAME
          && TREE_CODE (lhs2) != SSA_NAME)
        return (vn_valueize (gimple_vdef (s1))
                == vn_valueize (gimple_vdef (s2)));
      else if (TREE_CODE (lhs1) == SSA_NAME
               && TREE_CODE (lhs2) == SSA_NAME)
        return vn_valueize (lhs1) == vn_valueize (lhs2);
      return false;

    case GIMPLE_COND:
      t1 = gimple_cond_lhs (s1);
      t2 = gimple_cond_lhs (s2);
      if (!operand_equal_p (t1, t2, 0)
          && !gvn_uses_equal (t1, t2))
        return false;

      t1 = gimple_cond_rhs (s1);
      t2 = gimple_cond_rhs (s2);
      if (!operand_equal_p (t1, t2, 0)
          && !gvn_uses_equal (t1, t2))
        return false;

      code1 = gimple_expr_code (s1);
      code2 = gimple_expr_code (s2);
      inv_cond = (bitmap_bit_p (same_succ->inverse, bb1->index)
                  != bitmap_bit_p (same_succ->inverse, bb2->index));
      if (inv_cond)
        {
          bool honor_nans
            = HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (s1))));
          code2 = invert_tree_comparison (code2, honor_nans);
        }
      return code1 == code2;

    default:
      return false;
    }
}

/* Let GSI skip backwards over local defs.  Return the earliest vuse in VUSE.
   Return true in VUSE_ESCAPED if the vuse influenced an SSA_OP_DEF of one of
   the processed statements.  */

static void
gsi_advance_bw_nondebug_nonlocal (gimple_stmt_iterator *gsi, tree *vuse,
                                  bool *vuse_escaped)
{
  gimple stmt;
  tree lvuse;

  while (true)
    {
      if (gsi_end_p (*gsi))
        return;
      stmt = gsi_stmt (*gsi);

      lvuse = gimple_vuse (stmt);
      if (lvuse != NULL_TREE)
        {
          *vuse = lvuse;
          if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_DEF))
            *vuse_escaped = true;
        }

      if (!stmt_local_def (stmt))
        return;
      gsi_prev_nondebug (gsi);
    }
}

/* Determines whether BB1 and BB2 (members of same_succ) are duplicates.  If
   so, clusters them.  */

static void
find_duplicate (same_succ same_succ, basic_block bb1, basic_block bb2)
{
  gimple_stmt_iterator gsi1 = gsi_last_nondebug_bb (bb1);
  gimple_stmt_iterator gsi2 = gsi_last_nondebug_bb (bb2);
  tree vuse1 = NULL_TREE, vuse2 = NULL_TREE;
  bool vuse_escaped = false;

  gsi_advance_bw_nondebug_nonlocal (&gsi1, &vuse1, &vuse_escaped);
  gsi_advance_bw_nondebug_nonlocal (&gsi2, &vuse2, &vuse_escaped);

  while (!gsi_end_p (gsi1) && !gsi_end_p (gsi2))
    {
      gimple stmt1 = gsi_stmt (gsi1);
      gimple stmt2 = gsi_stmt (gsi2);

      if (!gimple_equal_p (same_succ, stmt1, stmt2))
        return;

      // We cannot tail-merge the builtins that end transactions.
      // ??? The alternative being unsharing of BBs in the tm_init pass.
      if (flag_tm
          && is_gimple_call (stmt1)
          && (gimple_call_flags (stmt1) & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt1)))
        return;

      gsi_prev_nondebug (&gsi1);
      gsi_prev_nondebug (&gsi2);
      gsi_advance_bw_nondebug_nonlocal (&gsi1, &vuse1, &vuse_escaped);
      gsi_advance_bw_nondebug_nonlocal (&gsi2, &vuse2, &vuse_escaped);
    }

  if (!(gsi_end_p (gsi1) && gsi_end_p (gsi2)))
    return;

  /* If the incoming vuses are not the same, and the vuse escaped into an
     SSA_OP_DEF, then merging the 2 blocks will change the value of the def,
     which potentially means the semantics of one of the blocks will be
     changed.  TODO: make this check more precise.  */
  if (vuse_escaped && vuse1 != vuse2)
    return;

  if (dump_file)
    fprintf (dump_file, "find_duplicates: <bb %d> duplicate of <bb %d>\n",
             bb1->index, bb2->index);

  set_cluster (bb1, bb2);
}

/* Returns whether for all phis in DEST the phi alternatives for E1 and
   E2 are equal.  */

static bool
same_phi_alternatives_1 (basic_block dest, edge e1, edge e2)
{
  int n1 = e1->dest_idx, n2 = e2->dest_idx;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      tree lhs = gimple_phi_result (phi);
      tree val1 = gimple_phi_arg_def (phi, n1);
      tree val2 = gimple_phi_arg_def (phi, n2);

      if (virtual_operand_p (lhs))
        continue;

      if (operand_equal_for_phi_arg_p (val1, val2))
        continue;
      if (gvn_uses_equal (val1, val2))
        continue;

      return false;
    }

  return true;
}

/* Returns whether for all successors of BB1 and BB2 (members of SAME_SUCC),
   the phi alternatives for BB1 and BB2 are equal.  */

static bool
same_phi_alternatives (same_succ same_succ, basic_block bb1, basic_block bb2)
{
  unsigned int s;
  bitmap_iterator bs;
  edge e1, e2;
  basic_block succ;

  EXECUTE_IF_SET_IN_BITMAP (same_succ->succs, 0, s, bs)
    {
      succ = BASIC_BLOCK (s);
      e1 = find_edge (bb1, succ);
      e2 = find_edge (bb2, succ);
      if (e1->flags & EDGE_COMPLEX
          || e2->flags & EDGE_COMPLEX)
        return false;

      /* For all phis in bb, the phi alternatives for e1 and e2 need to have
         the same value.  */
      if (!same_phi_alternatives_1 (succ, e1, e2))
        return false;
    }

  return true;
}

/* Return true if BB has non-vop phis.  */

static bool
bb_has_non_vop_phi (basic_block bb)
{
  gimple_seq phis = phi_nodes (bb);
  gimple phi;

  if (phis == NULL)
    return false;

  if (!gimple_seq_singleton_p (phis))
    return true;

  phi = gimple_seq_first_stmt (phis);
  return !virtual_operand_p (gimple_phi_result (phi));
}

/* Returns true if redirecting the incoming edges of FROM to TO maintains the
   invariant that uses in FROM are dominated by their defs.  */

static bool
deps_ok_for_redirect_from_bb_to_bb (basic_block from, basic_block to)
{
  basic_block cd, dep_bb = BB_DEP_BB (to);
  edge_iterator ei;
  edge e;
  bitmap from_preds;

  if (dep_bb == NULL)
    return true;

  /* Allocate only after the early exit above, so the bitmap is not leaked
     on that path.  */
  from_preds = BITMAP_ALLOC (NULL);
  FOR_EACH_EDGE (e, ei, from->preds)
    bitmap_set_bit (from_preds, e->src->index);
  cd = nearest_common_dominator_for_set (CDI_DOMINATORS, from_preds);
  BITMAP_FREE (from_preds);

  return dominated_by_p (CDI_DOMINATORS, dep_bb, cd);
}

/* Returns true if replacing BB1 (or its replacement bb) by BB2 (or its
   replacement bb) and vice versa maintains the invariant that uses in the
   replacement are dominated by their defs.  */

static bool
deps_ok_for_redirect (basic_block bb1, basic_block bb2)
{
  if (BB_CLUSTER (bb1) != NULL)
    bb1 = BB_CLUSTER (bb1)->rep_bb;

  if (BB_CLUSTER (bb2) != NULL)
    bb2 = BB_CLUSTER (bb2)->rep_bb;

  return (deps_ok_for_redirect_from_bb_to_bb (bb1, bb2)
          && deps_ok_for_redirect_from_bb_to_bb (bb2, bb1));
}

/* Within SAME_SUCC->bbs, find clusters of bbs which can be merged.  */

static void
find_clusters_1 (same_succ same_succ)
{
  basic_block bb1, bb2;
  unsigned int i, j;
  bitmap_iterator bi, bj;
  int nr_comparisons;
  int max_comparisons = PARAM_VALUE (PARAM_MAX_TAIL_MERGE_COMPARISONS);

  EXECUTE_IF_SET_IN_BITMAP (same_succ->bbs, 0, i, bi)
    {
      bb1 = BASIC_BLOCK (i);

      /* TODO: handle blocks with phi-nodes.  We'll have to find corresponding
         phi-nodes in bb1 and bb2, with the same alternatives for the same
         preds.  */
      if (bb_has_non_vop_phi (bb1))
        continue;

      nr_comparisons = 0;
      EXECUTE_IF_SET_IN_BITMAP (same_succ->bbs, i + 1, j, bj)
        {
          bb2 = BASIC_BLOCK (j);

          if (bb_has_non_vop_phi (bb2))
            continue;

          if (BB_CLUSTER (bb1) != NULL && BB_CLUSTER (bb1) == BB_CLUSTER (bb2))
            continue;

          /* Limit quadratic behaviour.  */
          nr_comparisons++;
          if (nr_comparisons > max_comparisons)
            break;

          /* This is a conservative dependency check.  We could test more
             precisely for the allowed replacement direction.  */
          if (!deps_ok_for_redirect (bb1, bb2))
            continue;

          if (!(same_phi_alternatives (same_succ, bb1, bb2)))
            continue;

          find_duplicate (same_succ, bb1, bb2);
        }
    }
}

/* Find clusters of bbs which can be merged.  */

static void
find_clusters (void)
{
  same_succ same;

  while (!worklist.is_empty ())
    {
      same = worklist.pop ();
      same->in_worklist = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing worklist entry\n");
          same_succ_print (dump_file, same);
        }
      find_clusters_1 (same);
    }
}

/* Returns the vop phi of BB, if any.  */

static gimple
vop_phi (basic_block bb)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (! virtual_operand_p (gimple_phi_result (stmt)))
        continue;
      return stmt;
    }
  return NULL;
}

/* Redirect all edges from BB1 to BB2, remove BB1, and mark it as deleted.  */

static void
replace_block_by (basic_block bb1, basic_block bb2)
{
  edge pred_edge;
  edge e1, e2;
  edge_iterator ei;
  unsigned int i;
  gimple bb2_phi;

  bb2_phi = vop_phi (bb2);

  /* Mark the basic block as deleted.  */
  mark_basic_block_deleted (bb1);

  /* Redirect the incoming edges of bb1 to bb2.  */
  for (i = EDGE_COUNT (bb1->preds); i > 0; --i)
    {
      pred_edge = EDGE_PRED (bb1, i - 1);
      pred_edge = redirect_edge_and_branch (pred_edge, bb2);
      gcc_assert (pred_edge != NULL);

      if (bb2_phi == NULL)
        continue;

      /* The phi might have run out of capacity when the redirect added an
         argument, which means it could have been replaced.  Refresh it.  */
      bb2_phi = vop_phi (bb2);

      add_phi_arg (bb2_phi, SSA_NAME_VAR (gimple_phi_result (bb2_phi)),
                   pred_edge, UNKNOWN_LOCATION);
    }

  bb2->frequency += bb1->frequency;
  if (bb2->frequency > BB_FREQ_MAX)
    bb2->frequency = BB_FREQ_MAX;

  bb2->count += bb1->count;

  /* Merge the outgoing edge counts from bb1 onto bb2.  */
  gcov_type out_sum = 0;
  FOR_EACH_EDGE (e1, ei, bb1->succs)
    {
      e2 = find_edge (bb2, e1->dest);
      gcc_assert (e2);
      e2->count += e1->count;
      out_sum += e2->count;
    }
  /* Recompute the edge probabilities from the new merged edge count.
     Use the sum of the new merged edge counts computed above instead
     of bb2's merged count, in case there are profile count insanities
     making the bb count inconsistent with the edge weights.  */
  FOR_EACH_EDGE (e2, ei, bb2->succs)
    e2->probability = GCOV_COMPUTE_SCALE (e2->count, out_sum);

  /* Do updates that use bb1, before deleting bb1.  */
  release_last_vdef (bb1);
  same_succ_flush_bb (bb1);

  delete_basic_block (bb1);
}

/* Bbs for which update_debug_stmt needs to be called.  */

static bitmap update_bbs;

/* For each cluster in all_clusters, merge all cluster->bbs.  Returns
   number of bbs removed.  */

static int
apply_clusters (void)
{
  basic_block bb1, bb2;
  bb_cluster c;
  unsigned int i, j;
  bitmap_iterator bj;
  int nr_bbs_removed = 0;

  for (i = 0; i < all_clusters.length (); ++i)
    {
      c = all_clusters[i];
      if (c == NULL)
        continue;

      bb2 = c->rep_bb;
      bitmap_set_bit (update_bbs, bb2->index);

      bitmap_clear_bit (c->bbs, bb2->index);
      EXECUTE_IF_SET_IN_BITMAP (c->bbs, 0, j, bj)
        {
          bb1 = BASIC_BLOCK (j);
          bitmap_clear_bit (update_bbs, bb1->index);

          replace_block_by (bb1, bb2);
          nr_bbs_removed++;
        }
    }

  return nr_bbs_removed;
}

/* Resets debug statement STMT if it has uses that are not dominated by their
   defs.  */

static void
update_debug_stmt (gimple stmt)
{
  use_operand_p use_p;
  ssa_op_iter oi;
  basic_block bbdef, bbuse;
  gimple def_stmt;
  tree name;

  if (!gimple_debug_bind_p (stmt))
    return;

  bbuse = gimple_bb (stmt);
  FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, oi, SSA_OP_USE)
    {
      name = USE_FROM_PTR (use_p);
      gcc_assert (TREE_CODE (name) == SSA_NAME);

      def_stmt = SSA_NAME_DEF_STMT (name);
      gcc_assert (def_stmt != NULL);

      bbdef = gimple_bb (def_stmt);
      if (bbdef == NULL || bbuse == bbdef
          || dominated_by_p (CDI_DOMINATORS, bbuse, bbdef))
        continue;

      gimple_debug_bind_reset_value (stmt);
      update_stmt (stmt);
    }
}

/* Resets all debug statements that have uses that are not
   dominated by their defs.  */

static void
update_debug_stmts (void)
{
  basic_block bb;
  bitmap_iterator bi;
  unsigned int i;

  EXECUTE_IF_SET_IN_BITMAP (update_bbs, 0, i, bi)
    {
      gimple stmt;
      gimple_stmt_iterator gsi;

      bb = BASIC_BLOCK (i);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (!is_gimple_debug (stmt))
            continue;
          update_debug_stmt (stmt);
        }
    }
}

/* Runs tail merge optimization.  */

unsigned int
tail_merge_optimize (unsigned int todo)
{
  int nr_bbs_removed_total = 0;
  int nr_bbs_removed;
  bool loop_entered = false;
  int iteration_nr = 0;
  int max_iterations = PARAM_VALUE (PARAM_MAX_TAIL_MERGE_ITERATIONS);

  if (!flag_tree_tail_merge
      || max_iterations == 0
      /* We try to be conservative with respect to loop structure, since:
         - the cases where tail-merging could both affect loop structure and be
           beneficial are rare,
         - it prevents us from having to fixup the loops using
           loops_state_set (LOOPS_NEED_FIXUP), and
         - keeping loop structure may allow us to simplify the pass.
         In order to be conservative, we need loop information.  In rare cases
         (about 7 test-cases in the g++ testsuite) there is none (because
         loop_optimizer_finalize has been called before tail-merge, and
         PROP_loops is not set), so we bail out.  */
      || current_loops == NULL)
    return 0;

  timevar_push (TV_TREE_TAIL_MERGE);

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      /* PRE can leave us with unreachable blocks, remove them now.  */
      delete_unreachable_blocks ();
      calculate_dominance_info (CDI_DOMINATORS);
    }
  init_worklist ();

  while (!worklist.is_empty ())
    {
      if (!loop_entered)
        {
          loop_entered = true;
          alloc_cluster_vectors ();
          update_bbs = BITMAP_ALLOC (NULL);
        }
      else
        reset_cluster_vectors ();

      iteration_nr++;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "worklist iteration #%d\n", iteration_nr);

      find_clusters ();
      gcc_assert (worklist.is_empty ());
      if (all_clusters.is_empty ())
        break;

      nr_bbs_removed = apply_clusters ();
      nr_bbs_removed_total += nr_bbs_removed;
      if (nr_bbs_removed == 0)
        break;

      free_dominance_info (CDI_DOMINATORS);

      if (iteration_nr == max_iterations)
        break;

      calculate_dominance_info (CDI_DOMINATORS);
      update_worklist ();
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "htab collision / search: %f\n",
             same_succ_htab.collisions ());

  if (nr_bbs_removed_total > 0)
    {
      if (MAY_HAVE_DEBUG_STMTS)
        {
          calculate_dominance_info (CDI_DOMINATORS);
          update_debug_stmts ();
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Before TODOs.\n");
          dump_function_to_file (current_function_decl, dump_file, dump_flags);
        }

      todo |= (TODO_verify_ssa | TODO_verify_stmts | TODO_verify_flow);
      mark_virtual_operands_for_renaming (cfun);
    }

  delete_worklist ();
  if (loop_entered)
    {
      delete_cluster_vectors ();
      BITMAP_FREE (update_bbs);
    }

  timevar_pop (TV_TREE_TAIL_MERGE);

  return todo;
}