/* High-level loop manipulation functions.
   Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "cfgloop.h"
#include "tree-pass.h"
#include "cfglayout.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "tree-inline.h"
#include "langhooks.h"

/* Creates an induction variable with value BASE + STEP * iteration in LOOP.
   It is expected that neither BASE nor STEP are shared with other expressions
   (unless the sharing rules allow this).  Use VAR as a base var_decl for it
   (if NULL, a new temporary will be created).  The increment will occur at
   INCR_POS (after it if AFTER is true, before it otherwise).  INCR_POS and
   AFTER can be computed using standard_iv_increment_position.  The ssa versions
   of the variable before and after increment will be stored in VAR_BEFORE and
   VAR_AFTER (unless they are NULL).  */

void
create_iv (tree base, tree step, tree var, struct loop *loop,
           gimple_stmt_iterator *incr_pos, bool after,
           tree *var_before, tree *var_after)
{
  gimple stmt;
  tree initial, step1;
  gimple_seq stmts;
  tree vb, va;
  enum tree_code incr_op = PLUS_EXPR;
  edge pe = loop_preheader_edge (loop);

  if (!var)
    {
      var = create_tmp_var (TREE_TYPE (base), "ivtmp");
      add_referenced_var (var);
    }

  vb = make_ssa_name (var, NULL);
  if (var_before)
    *var_before = vb;
  va = make_ssa_name (var, NULL);
  if (var_after)
    *var_after = va;

  /* For easier readability of the created code, produce MINUS_EXPRs
     when suitable.  */
  if (TREE_CODE (step) == INTEGER_CST)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (step)))
        {
          step1 = fold_build1 (NEGATE_EXPR, TREE_TYPE (step), step);
          if (tree_int_cst_lt (step1, step))
            {
              incr_op = MINUS_EXPR;
              step = step1;
            }
        }
      else
        {
          bool ovf;

          if (!tree_expr_nonnegative_warnv_p (step, &ovf)
              && may_negate_without_overflow_p (step))
            {
              incr_op = MINUS_EXPR;
              step = fold_build1 (NEGATE_EXPR, TREE_TYPE (step), step);
            }
        }
    }
  if (POINTER_TYPE_P (TREE_TYPE (base)))
    {
      if (TREE_CODE (base) == ADDR_EXPR)
        mark_addressable (TREE_OPERAND (base, 0));
      step = fold_convert (sizetype, step);
      if (incr_op == MINUS_EXPR)
        step = fold_build1 (NEGATE_EXPR, sizetype, step);
      incr_op = POINTER_PLUS_EXPR;
    }

  /* Gimplify the step if necessary.  We put the computations in front of the
     loop (i.e. the step should be loop invariant).  */
  step = force_gimple_operand (step, &stmts, true, NULL_TREE);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (pe, stmts);

  stmt = gimple_build_assign_with_ops (incr_op, va, vb, step);
  if (after)
    gsi_insert_after (incr_pos, stmt, GSI_NEW_STMT);
  else
    gsi_insert_before (incr_pos, stmt, GSI_NEW_STMT);

  initial = force_gimple_operand (base, &stmts, true, var);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (pe, stmts);

  stmt = create_phi_node (vb, loop->header);
  SSA_NAME_DEF_STMT (vb) = stmt;
  add_phi_arg (stmt, initial, loop_preheader_edge (loop), UNKNOWN_LOCATION);
  add_phi_arg (stmt, va, loop_latch_edge (loop), UNKNOWN_LOCATION);
}
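
/* For illustration, a caller that wants a counter running from 0 with step 1,
   incremented at the standard position, might do something like (assuming
   TYPE is the desired integer type):

     gimple_stmt_iterator incr_pos;
     bool after;
     tree var_before, var_after;

     standard_iv_increment_position (loop, &incr_pos, &after);
     create_iv (build_int_cst (type, 0), build_int_cst (type, 1), NULL_TREE,
                loop, &incr_pos, after, &var_before, &var_after);

   canonicalize_loop_ivs at the end of this file uses create_iv this way,
   except that it places the increment at the end of the latch directly.  */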

/* Add exit phis for the USE on EXIT.  */

static void
add_exit_phis_edge (basic_block exit, tree use)
{
  gimple phi, def_stmt = SSA_NAME_DEF_STMT (use);
  basic_block def_bb = gimple_bb (def_stmt);
  struct loop *def_loop;
  edge e;
  edge_iterator ei;

  /* Check that at least one of the edges entering the EXIT block exits
     the loop in which USE is defined.  */
  FOR_EACH_EDGE (e, ei, exit->preds)
    {
      def_loop = find_common_loop (def_bb->loop_father, e->src->loop_father);
      if (!flow_bb_inside_loop_p (def_loop, e->dest))
        break;
    }

  if (!e)
    return;

  phi = create_phi_node (use, exit);
  create_new_def_for (gimple_phi_result (phi), phi,
                      gimple_phi_result_ptr (phi));
  FOR_EACH_EDGE (e, ei, exit->preds)
    add_phi_arg (phi, use, e, UNKNOWN_LOCATION);
}

/* Add exit phis for VAR that is used in LIVEIN.
   Exits of the loops are stored in EXITS.  */

static void
add_exit_phis_var (tree var, bitmap livein, bitmap exits)
{
  bitmap def;
  unsigned index;
  basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (var));
  bitmap_iterator bi;

  if (is_gimple_reg (var))
    bitmap_clear_bit (livein, def_bb->index);
  else
    bitmap_set_bit (livein, def_bb->index);

  def = BITMAP_ALLOC (NULL);
  bitmap_set_bit (def, def_bb->index);
  compute_global_livein (livein, def);
  BITMAP_FREE (def);

  EXECUTE_IF_AND_IN_BITMAP (exits, livein, 0, index, bi)
    {
      add_exit_phis_edge (BASIC_BLOCK (index), var);
    }
}

/* Add exit phis for the names marked in NAMES_TO_RENAME.
   Exits of the loops are stored in EXITS.  Sets of blocks where the ssa
   names are used are stored in USE_BLOCKS.  */

static void
add_exit_phis (bitmap names_to_rename, bitmap *use_blocks, bitmap loop_exits)
{
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (names_to_rename, 0, i, bi)
    {
      add_exit_phis_var (ssa_name (i), use_blocks[i], loop_exits);
    }
}

/* Returns a bitmap of all loop exit edge targets.  */

static bitmap
get_loops_exits (void)
{
  bitmap exits = BITMAP_ALLOC (NULL);
  basic_block bb;
  edge e;
  edge_iterator ei;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src != ENTRY_BLOCK_PTR
            && !flow_bb_inside_loop_p (e->src->loop_father, bb))
          {
            bitmap_set_bit (exits, bb->index);
            break;
          }
    }

  return exits;
}

/* For USE in BB, if it is used outside of the loop it is defined in,
   mark it for rewrite.  Record the basic block BB where it is used
   in USE_BLOCKS.  Record the ssa name index in the NEED_PHIS bitmap.  */

static void
find_uses_to_rename_use (basic_block bb, tree use, bitmap *use_blocks,
                         bitmap need_phis)
{
  unsigned ver;
  basic_block def_bb;
  struct loop *def_loop;

  if (TREE_CODE (use) != SSA_NAME)
    return;

  /* We don't need to keep virtual operands in loop-closed form.  */
  if (!is_gimple_reg (use))
    return;

  ver = SSA_NAME_VERSION (use);
  def_bb = gimple_bb (SSA_NAME_DEF_STMT (use));
  if (!def_bb)
    return;
  def_loop = def_bb->loop_father;

  /* If the definition is not inside a loop, it is not interesting.  */
  if (!loop_outer (def_loop))
    return;

  /* If the use is not outside of the loop it is defined in, it is not
     interesting.  */
  if (flow_bb_inside_loop_p (def_loop, bb))
    return;

  if (!use_blocks[ver])
    use_blocks[ver] = BITMAP_ALLOC (NULL);
  bitmap_set_bit (use_blocks[ver], bb->index);

  bitmap_set_bit (need_phis, ver);
}

/* For uses in STMT, mark names that are used outside of the loop they are
   defined in for rewrite.  Record the set of blocks in which the ssa
   names are used in USE_BLOCKS, and the ssa names themselves in
   NEED_PHIS.  */

static void
find_uses_to_rename_stmt (gimple stmt, bitmap *use_blocks, bitmap need_phis)
{
  ssa_op_iter iter;
  tree var;
  basic_block bb = gimple_bb (stmt);

  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES)
    find_uses_to_rename_use (bb, var, use_blocks, need_phis);
}

/* Marks names that are used in BB and outside of the loop they are
   defined in for rewrite.  Records the set of blocks in which the ssa
   names are used in USE_BLOCKS.  Records the SSA names that will
   need exit PHIs in NEED_PHIS.  */

static void
find_uses_to_rename_bb (basic_block bb, bitmap *use_blocks, bitmap need_phis)
{
  gimple_stmt_iterator bsi;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    for (bsi = gsi_start_phis (e->dest); !gsi_end_p (bsi); gsi_next (&bsi))
      find_uses_to_rename_use (bb, PHI_ARG_DEF_FROM_EDGE (gsi_stmt (bsi), e),
                               use_blocks, need_phis);

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    find_uses_to_rename_stmt (gsi_stmt (bsi), use_blocks, need_phis);
}

/* Marks names that are used outside of the loop they are defined in
   for rewrite.  Records the set of blocks in which the ssa
   names are used in USE_BLOCKS.  If CHANGED_BBS is not NULL,
   scan only blocks in this set.  */

static void
find_uses_to_rename (bitmap changed_bbs, bitmap *use_blocks, bitmap need_phis)
{
  basic_block bb;
  unsigned index;
  bitmap_iterator bi;

  if (changed_bbs && !bitmap_empty_p (changed_bbs))
    {
      EXECUTE_IF_SET_IN_BITMAP (changed_bbs, 0, index, bi)
        {
          find_uses_to_rename_bb (BASIC_BLOCK (index), use_blocks, need_phis);
        }
    }
  else
    {
      FOR_EACH_BB (bb)
        {
          find_uses_to_rename_bb (bb, use_blocks, need_phis);
        }
    }
}

/* Rewrites the program into a loop closed ssa form -- i.e. inserts extra
   phi nodes to ensure that no variable is used outside the loop it is
   defined in.

   This strengthening of the basic ssa form has several advantages:

   1) Updating it during unrolling/peeling/versioning is trivial, since
      we do not need to care about the uses outside of the loop.
   2) The behavior of all uses of an induction variable is the same.
      Without this, you need to distinguish the case when the variable
      is used outside of the loop it is defined in, for example

      for (i = 0; i < 100; i++)
        {
          for (j = 0; j < 100; j++)
            {
              k = i + j;
              use1 (k);
            }
          use2 (k);
        }

      Looking from the outer loop with the normal SSA form, the first use of k
      is not well-behaved, while the second one is an induction variable with
      base 99 and step 1.

   If CHANGED_BBS is not NULL, we look for uses outside loops only in
   the basic blocks in this set.

   UPDATE_FLAG is used in the call to update_ssa.  See
   TODO_update_ssa* for documentation.  */

void
rewrite_into_loop_closed_ssa (bitmap changed_bbs, unsigned update_flag)
{
  bitmap loop_exits;
  bitmap *use_blocks;
  unsigned i, old_num_ssa_names;
  bitmap names_to_rename;

  loops_state_set (LOOP_CLOSED_SSA);
  if (number_of_loops () <= 1)
    return;

  loop_exits = get_loops_exits ();
  names_to_rename = BITMAP_ALLOC (NULL);

  /* If the pass has caused the SSA form to be out-of-date, update it
     now.  */
  update_ssa (update_flag);

  old_num_ssa_names = num_ssa_names;
  use_blocks = XCNEWVEC (bitmap, old_num_ssa_names);

  /* Find the uses outside loops.  */
  find_uses_to_rename (changed_bbs, use_blocks, names_to_rename);

  /* Add the PHI nodes on exits of the loops for the names we need to
     rewrite.  */
  add_exit_phis (names_to_rename, use_blocks, loop_exits);

  for (i = 0; i < old_num_ssa_names; i++)
    BITMAP_FREE (use_blocks[i]);
  free (use_blocks);
  BITMAP_FREE (loop_exits);
  BITMAP_FREE (names_to_rename);

  /* Fix up all the names found to be used outside their original
     loops.  */
  update_ssa (TODO_update_ssa);
}
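
/* To make the effect concrete: for the nested-loop example in the comment
   above, rewrite_into_loop_closed_ssa inserts a new PHI node in the block
   following the inner loop, roughly

     k.1 = PHI <k (inner loop exit)>;
     ...
     use2 (k.1);

   so that every use of k outside the inner loop goes through k.1, and only
   this single-argument exit PHI needs to be updated when the inner loop is
   unrolled or versioned.  (The name k.1 is illustrative only.)  */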

/* Check invariants of the loop closed ssa form for the USE in BB.  */

static void
check_loop_closed_ssa_use (basic_block bb, tree use)
{
  gimple def;
  basic_block def_bb;

  if (TREE_CODE (use) != SSA_NAME || !is_gimple_reg (use))
    return;

  def = SSA_NAME_DEF_STMT (use);
  def_bb = gimple_bb (def);
  gcc_assert (!def_bb
              || flow_bb_inside_loop_p (def_bb->loop_father, bb));
}

/* Checks invariants of loop closed ssa form in statement STMT in BB.  */

static void
check_loop_closed_ssa_stmt (basic_block bb, gimple stmt)
{
  ssa_op_iter iter;
  tree var;

  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES)
    check_loop_closed_ssa_use (bb, var);
}

/* Checks that invariants of the loop closed ssa form are preserved.  */

void
verify_loop_closed_ssa (void)
{
  basic_block bb;
  gimple_stmt_iterator bsi;
  gimple phi;
  edge e;
  edge_iterator ei;

  if (number_of_loops () <= 1)
    return;

  verify_ssa (false);

  FOR_EACH_BB (bb)
    {
      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          phi = gsi_stmt (bsi);
          FOR_EACH_EDGE (e, ei, bb->preds)
            check_loop_closed_ssa_use (e->src,
                                       PHI_ARG_DEF_FROM_EDGE (phi, e));
        }

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        check_loop_closed_ssa_stmt (bb, gsi_stmt (bsi));
    }
}

/* Split loop exit edge EXIT.  Things are a bit complicated by the need to
   preserve the loop closed ssa form.  The newly created block is returned.  */

basic_block
split_loop_exit_edge (edge exit)
{
  basic_block dest = exit->dest;
  basic_block bb = split_edge (exit);
  gimple phi, new_phi;
  tree new_name, name;
  use_operand_p op_p;
  gimple_stmt_iterator psi;
  source_location locus;

  for (psi = gsi_start_phis (dest); !gsi_end_p (psi); gsi_next (&psi))
    {
      phi = gsi_stmt (psi);
      op_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (bb));
      locus = gimple_phi_arg_location_from_edge (phi, single_succ_edge (bb));

      name = USE_FROM_PTR (op_p);

      /* If the argument of the PHI node is a constant, we do not need
         to keep it inside the loop.  */
      if (TREE_CODE (name) != SSA_NAME)
        continue;

      /* Otherwise create an auxiliary phi node that will copy the value
         of the SSA name out of the loop.  */
      new_name = duplicate_ssa_name (name, NULL);
      new_phi = create_phi_node (new_name, bb);
      SSA_NAME_DEF_STMT (new_name) = new_phi;
      add_phi_arg (new_phi, name, exit, locus);
      SET_USE (op_p, new_name);
    }

  return bb;
}
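
/* Illustration: if the exit edge leaves a loop computing sum, and its
   destination starts with

     res = PHI <sum (exit edge), other (other edge)>;

   then after split_loop_exit_edge the new block on the exit edge contains
   an auxiliary single-argument PHI, approximately

     sum.1 = PHI <sum (exit edge)>;

   and the original PHI uses sum.1 instead of sum, which preserves the
   loop-closed SSA invariant that sum is not used outside its loop.
   (The names are illustrative only.)  */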

/* Returns the basic block in which statements should be emitted for induction
   variables incremented at the end of the LOOP.  */

basic_block
ip_end_pos (struct loop *loop)
{
  return loop->latch;
}

/* Returns the basic block in which statements should be emitted for induction
   variables incremented just before the exit condition of a LOOP.  */

basic_block
ip_normal_pos (struct loop *loop)
{
  gimple last;
  basic_block bb;
  edge exit;

  if (!single_pred_p (loop->latch))
    return NULL;

  bb = single_pred (loop->latch);
  last = last_stmt (bb);
  if (!last
      || gimple_code (last) != GIMPLE_COND)
    return NULL;

  exit = EDGE_SUCC (bb, 0);
  if (exit->dest == loop->latch)
    exit = EDGE_SUCC (bb, 1);

  if (flow_bb_inside_loop_p (loop, exit->dest))
    return NULL;

  return bb;
}

/* Stores the standard position for induction variable increment in LOOP
   (just before the exit condition if it is available and latch block is empty,
   end of the latch block otherwise) to BSI.  INSERT_AFTER is set to true if
   the increment should be inserted after *BSI.  */

void
standard_iv_increment_position (struct loop *loop, gimple_stmt_iterator *bsi,
                                bool *insert_after)
{
  basic_block bb = ip_normal_pos (loop), latch = ip_end_pos (loop);
  gimple last = last_stmt (latch);

  if (!bb
      || (last && gimple_code (last) != GIMPLE_LABEL))
    {
      *bsi = gsi_last_bb (latch);
      *insert_after = true;
    }
  else
    {
      *bsi = gsi_last_bb (bb);
      *insert_after = false;
    }
}

/* Copies phi node arguments for duplicated blocks.  The index of the first
   duplicated block is FIRST_NEW_BLOCK.  */

static void
copy_phi_node_args (unsigned first_new_block)
{
  unsigned i;

  for (i = first_new_block; i < (unsigned) last_basic_block; i++)
    BASIC_BLOCK (i)->flags |= BB_DUPLICATED;

  for (i = first_new_block; i < (unsigned) last_basic_block; i++)
    add_phi_args_after_copy_bb (BASIC_BLOCK (i));

  for (i = first_new_block; i < (unsigned) last_basic_block; i++)
    BASIC_BLOCK (i)->flags &= ~BB_DUPLICATED;
}

/* The same as cfgloopmanip.c:duplicate_loop_to_header_edge, but also
   updates the PHI nodes at start of the copied region.  In order to
   achieve this, only loops whose exits all lead to the same location
   are handled.

   Notice that we do not completely update the SSA web after
   duplication.  The caller is responsible for calling update_ssa
   after the loop has been duplicated.  */

bool
gimple_duplicate_loop_to_header_edge (struct loop *loop, edge e,
                                      unsigned int ndupl, sbitmap wont_exit,
                                      edge orig, VEC (edge, heap) **to_remove,
                                      int flags)
{
  unsigned first_new_block;

  if (!loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
    return false;
  if (!loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS))
    return false;

#ifdef ENABLE_CHECKING
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    verify_loop_closed_ssa ();
#endif

  first_new_block = last_basic_block;
  if (!duplicate_loop_to_header_edge (loop, e, ndupl, wont_exit,
                                      orig, to_remove, flags))
    return false;

  /* Readd the removed phi args for e.  */
  flush_pending_stmts (e);

  /* Copy the phi node arguments.  */
  copy_phi_node_args (first_new_block);

  scev_reset ();

  return true;
}

/* Returns true if we can unroll LOOP FACTOR times.  The number
   of iterations of the loop is returned in NITER.  */

bool
can_unroll_loop_p (struct loop *loop, unsigned factor,
                   struct tree_niter_desc *niter)
{
  edge exit;

  /* Check whether unrolling is possible.  We only want to unroll loops
     for which we are able to determine the number of iterations.  We also
     want to split the extra iterations of the loop from its end,
     therefore we require that the loop has precisely one
     exit.  */

  exit = single_dom_exit (loop);
  if (!exit)
    return false;

  if (!number_of_iterations_exit (loop, exit, niter, false)
      || niter->cmp == ERROR_MARK
      /* Scalar evolutions analysis might have copy propagated
         the abnormal ssa names into these expressions, hence
         emitting the computations based on them during loop
         unrolling might create overlapping life ranges for
         them, and failures in out-of-ssa.  */
      || contains_abnormal_ssa_name_p (niter->may_be_zero)
      || contains_abnormal_ssa_name_p (niter->control.base)
      || contains_abnormal_ssa_name_p (niter->control.step)
      || contains_abnormal_ssa_name_p (niter->bound))
    return false;

  /* And of course, we must be able to duplicate the loop.  */
  if (!can_duplicate_loop_p (loop))
    return false;

  /* The final loop should be small enough.  */
  if (tree_num_loop_insns (loop, &eni_size_weights) * factor
      > (unsigned) PARAM_VALUE (PARAM_MAX_UNROLLED_INSNS))
    return false;

  return true;
}
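
/* A typical caller pairs this predicate with tree_unroll_loop, roughly
   (illustrative sketch):

     struct tree_niter_desc desc;

     if (can_unroll_loop_p (loop, factor, &desc))
       tree_unroll_loop (loop, factor, single_dom_exit (loop), &desc);

   which is essentially how the prefetching pass unrolls loops.  */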

/* Determines the conditions that control execution of LOOP unrolled FACTOR
   times.  DESC is the number of iterations of LOOP.  ENTER_COND is set to the
   condition that must be true if the main loop can be entered.
   EXIT_BASE, EXIT_STEP, EXIT_CMP and EXIT_BOUND are set to values describing
   how the exit from the unrolled loop should be controlled.  */

static void
determine_exit_conditions (struct loop *loop, struct tree_niter_desc *desc,
                           unsigned factor, tree *enter_cond,
                           tree *exit_base, tree *exit_step,
                           enum tree_code *exit_cmp, tree *exit_bound)
{
  gimple_seq stmts;
  tree base = desc->control.base;
  tree step = desc->control.step;
  tree bound = desc->bound;
  tree type = TREE_TYPE (step);
  tree bigstep, delta;
  tree min = lower_bound_in_type (type, type);
  tree max = upper_bound_in_type (type, type);
  enum tree_code cmp = desc->cmp;
  tree cond = boolean_true_node, assum;

  /* For pointers, do the arithmetic in the type of step (sizetype).  */
  base = fold_convert (type, base);
  bound = fold_convert (type, bound);

  *enter_cond = boolean_false_node;
  *exit_base = NULL_TREE;
  *exit_step = NULL_TREE;
  *exit_cmp = ERROR_MARK;
  *exit_bound = NULL_TREE;
  gcc_assert (cmp != ERROR_MARK);

  /* We only need to be correct when we answer the question
     "Do at least FACTOR more iterations remain?" in the unrolled loop.
     Thus, transforming BASE + STEP * i <> BOUND to
     BASE + STEP * i < BOUND is ok.  */
  if (cmp == NE_EXPR)
    {
      if (tree_int_cst_sign_bit (step))
        cmp = GT_EXPR;
      else
        cmp = LT_EXPR;
    }
  else if (cmp == LT_EXPR)
    {
      gcc_assert (!tree_int_cst_sign_bit (step));
    }
  else if (cmp == GT_EXPR)
    {
      gcc_assert (tree_int_cst_sign_bit (step));
    }
  else
    gcc_unreachable ();

  /* The main body of the loop may be entered iff:

     1) desc->may_be_zero is false.
     2) it is possible to check that there are at least FACTOR iterations
        of the loop, i.e., BOUND - step * FACTOR does not overflow.
     3) # of iterations is at least FACTOR  */

  if (!integer_zerop (desc->may_be_zero))
    cond = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
                        invert_truthvalue (desc->may_be_zero),
                        cond);

  bigstep = fold_build2 (MULT_EXPR, type, step,
                         build_int_cst_type (type, factor));
  delta = fold_build2 (MINUS_EXPR, type, bigstep, step);
  if (cmp == LT_EXPR)
    assum = fold_build2 (GE_EXPR, boolean_type_node,
                         bound,
                         fold_build2 (PLUS_EXPR, type, min, delta));
  else
    assum = fold_build2 (LE_EXPR, boolean_type_node,
                         bound,
                         fold_build2 (PLUS_EXPR, type, max, delta));
  cond = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, assum, cond);

  bound = fold_build2 (MINUS_EXPR, type, bound, delta);
  assum = fold_build2 (cmp, boolean_type_node, base, bound);
  cond = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, assum, cond);

  cond = force_gimple_operand (unshare_expr (cond), &stmts, false, NULL_TREE);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
  /* cond now may be a gimple comparison, which would be OK, but also any
     other gimple rhs (say a && b).  In this case we need to force it to an
     operand.  */
  if (!is_gimple_condexpr (cond))
    {
      cond = force_gimple_operand (cond, &stmts, true, NULL_TREE);
      if (stmts)
        gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
    }
  *enter_cond = cond;

  base = force_gimple_operand (unshare_expr (base), &stmts, true, NULL_TREE);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
  bound = force_gimple_operand (unshare_expr (bound), &stmts, true, NULL_TREE);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);

  *exit_base = base;
  *exit_step = bigstep;
  *exit_cmp = cmp;
  *exit_bound = bound;
}
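
/* Worked example (illustrative): for a control IV with BASE 0, STEP 1,
   BOUND n, CMP LT_EXPR and FACTOR 4, the code above computes bigstep = 4
   and delta = 3, so the unrolled loop keeps iterating while the counter
   satisfies counter < n - 3 (with the counter advancing by 4 each time),
   and it is entered only when at least 4 iterations remain (0 < n - 3)
   and n - 3 does not wrap around.  */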

/* Scales the frequencies of all basic blocks in LOOP that are strictly
   dominated by BB by NUM/DEN.  */

static void
scale_dominated_blocks_in_loop (struct loop *loop, basic_block bb,
                                int num, int den)
{
  basic_block son;

  if (den == 0)
    return;

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      if (!flow_bb_inside_loop_p (loop, son))
        continue;
      scale_bbs_frequencies_int (&son, 1, num, den);
      scale_dominated_blocks_in_loop (loop, son, num, den);
    }
}

/* Unroll LOOP FACTOR times.  DESC describes the number of iterations of LOOP.
   EXIT is the exit of the loop to which DESC corresponds.

   If N is the number of iterations of the loop and MAY_BE_ZERO is the
   condition under which the loop exits in the first iteration even if N != 0,

   while (1)
     {
       x = phi (init, next);

       pre;
       if (st)
         break;
       post;
     }

   becomes (with possibly the exit conditions formulated a bit differently,
   avoiding the need to create a new iv):

   if (MAY_BE_ZERO || N < FACTOR)
     goto rest;

   do
     {
       x = phi (init, next);

       pre;
       post;
       pre;
       post;
       ...
       pre;
       post;
       N -= FACTOR;

     } while (N >= FACTOR);

   rest:
     init' = phi (init, x);

   while (1)
     {
       x = phi (init', next);

       pre;
       if (st)
         break;
       post;
     }

   Before the loop is unrolled, TRANSFORM is called for it (only for the
   unrolled loop, but not for its versioned copy).  DATA is passed to
   TRANSFORM.  */

/* Probability in % that the unrolled loop is entered.  Just a guess.  */
#define PROB_UNROLLED_LOOP_ENTERED 90

void
tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
                                edge exit, struct tree_niter_desc *desc,
                                transform_callback transform,
                                void *data)
{
  gimple exit_if;
  tree ctr_before, ctr_after;
  tree enter_main_cond, exit_base, exit_step, exit_bound;
  enum tree_code exit_cmp;
  gimple phi_old_loop, phi_new_loop, phi_rest;
  gimple_stmt_iterator psi_old_loop, psi_new_loop;
  tree init, next, new_init, var;
  struct loop *new_loop;
  basic_block rest, exit_bb;
  edge old_entry, new_entry, old_latch, precond_edge, new_exit;
  edge new_nonexit, e;
  gimple_stmt_iterator bsi;
  use_operand_p op;
  bool ok;
  unsigned est_niter, prob_entry, scale_unrolled, scale_rest, freq_e, freq_h;
  unsigned new_est_niter, i, prob;
  unsigned irr = loop_preheader_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP;
  sbitmap wont_exit;
  VEC (edge, heap) *to_remove = NULL;

  est_niter = expected_loop_iterations (loop);
  determine_exit_conditions (loop, desc, factor,
                             &enter_main_cond, &exit_base, &exit_step,
                             &exit_cmp, &exit_bound);

  /* Let us assume that the unrolled loop is quite likely to be entered.  */
  if (integer_nonzerop (enter_main_cond))
    prob_entry = REG_BR_PROB_BASE;
  else
    prob_entry = PROB_UNROLLED_LOOP_ENTERED * REG_BR_PROB_BASE / 100;

  /* The values for scales should keep profile consistent, and somewhat close
     to correct.

     TODO: The current value of SCALE_REST makes it appear that the loop that
     is created by splitting the remaining iterations of the unrolled loop is
     executed the same number of times as the original loop, and with the same
     frequencies, which is obviously wrong.  This does not appear to cause
     problems, so we do not bother with fixing it for now.  To make the profile
     correct, we would need to change the probability of the exit edge of the
     loop, and recompute the distribution of frequencies in its body because
     of this change (scale the frequencies of blocks before and after the exit
     by appropriate factors).  */
  scale_unrolled = prob_entry;
  scale_rest = REG_BR_PROB_BASE;

  new_loop = loop_version (loop, enter_main_cond, NULL,
                           prob_entry, scale_unrolled, scale_rest, true);
  gcc_assert (new_loop != NULL);
  update_ssa (TODO_update_ssa);

  /* Determine the probability of the exit edge of the unrolled loop.  */
  new_est_niter = est_niter / factor;

  /* Without profile feedback, loops for which we do not know a better
     estimate are assumed to roll 10 times.  When we unroll such a loop, it
     appears to roll too little, and it may even seem to be cold.  To avoid
     this, we ensure that the created loop appears to roll at least 5 times
     (but at most as many times as before unrolling).  */
  if (new_est_niter < 5)
    {
      if (est_niter < 5)
        new_est_niter = est_niter;
      else
        new_est_niter = 5;
    }

  /* Prepare the cfg and update the phi nodes.  Move the loop exit to the
     loop latch (and make its condition dummy, for the moment).  */
  rest = loop_preheader_edge (new_loop)->src;
  precond_edge = single_pred_edge (rest);
  split_edge (loop_latch_edge (loop));
  exit_bb = single_pred (loop->latch);

  /* Since the exit edge will be removed, the frequency of all the blocks
     in the loop that are dominated by it must be scaled by
     1 / (1 - exit->probability).  */
  scale_dominated_blocks_in_loop (loop, exit->src,
                                  REG_BR_PROB_BASE,
                                  REG_BR_PROB_BASE - exit->probability);

  bsi = gsi_last_bb (exit_bb);
  exit_if = gimple_build_cond (EQ_EXPR, integer_zero_node,
                               integer_zero_node,
                               NULL_TREE, NULL_TREE);

  gsi_insert_after (&bsi, exit_if, GSI_NEW_STMT);
  new_exit = make_edge (exit_bb, rest, EDGE_FALSE_VALUE | irr);
  rescan_loop_exit (new_exit, true, false);

  /* Set the probability of the new exit to that of the old one.  Fix
     the frequency of the latch block, by scaling it back by
     1 - exit->probability.  */
  new_exit->count = exit->count;
  new_exit->probability = exit->probability;
  new_nonexit = single_pred_edge (loop->latch);
  new_nonexit->probability = REG_BR_PROB_BASE - exit->probability;
  new_nonexit->flags = EDGE_TRUE_VALUE;
  new_nonexit->count -= exit->count;
  if (new_nonexit->count < 0)
    new_nonexit->count = 0;
  scale_bbs_frequencies_int (&loop->latch, 1, new_nonexit->probability,
                             REG_BR_PROB_BASE);

  old_entry = loop_preheader_edge (loop);
  new_entry = loop_preheader_edge (new_loop);
  old_latch = loop_latch_edge (loop);
  for (psi_old_loop = gsi_start_phis (loop->header),
       psi_new_loop = gsi_start_phis (new_loop->header);
       !gsi_end_p (psi_old_loop);
       gsi_next (&psi_old_loop), gsi_next (&psi_new_loop))
    {
      phi_old_loop = gsi_stmt (psi_old_loop);
      phi_new_loop = gsi_stmt (psi_new_loop);

      init = PHI_ARG_DEF_FROM_EDGE (phi_old_loop, old_entry);
      op = PHI_ARG_DEF_PTR_FROM_EDGE (phi_new_loop, new_entry);
      gcc_assert (operand_equal_for_phi_arg_p (init, USE_FROM_PTR (op)));
      next = PHI_ARG_DEF_FROM_EDGE (phi_old_loop, old_latch);

      /* Prefer using the original variable as a base for the new ssa name.
         This is necessary for virtual ops, and useful in order to avoid
         losing debug info for real ops.  */
      if (TREE_CODE (next) == SSA_NAME
          && useless_type_conversion_p (TREE_TYPE (next),
                                        TREE_TYPE (init)))
        var = SSA_NAME_VAR (next);
      else if (TREE_CODE (init) == SSA_NAME
               && useless_type_conversion_p (TREE_TYPE (init),
                                             TREE_TYPE (next)))
        var = SSA_NAME_VAR (init);
      else if (useless_type_conversion_p (TREE_TYPE (next), TREE_TYPE (init)))
        {
          var = create_tmp_var (TREE_TYPE (next), "unrinittmp");
          add_referenced_var (var);
        }
      else
        {
          var = create_tmp_var (TREE_TYPE (init), "unrinittmp");
          add_referenced_var (var);
        }

      new_init = make_ssa_name (var, NULL);
      phi_rest = create_phi_node (new_init, rest);
      SSA_NAME_DEF_STMT (new_init) = phi_rest;

      add_phi_arg (phi_rest, init, precond_edge, UNKNOWN_LOCATION);
      add_phi_arg (phi_rest, next, new_exit, UNKNOWN_LOCATION);
      SET_USE (op, new_init);
    }

  remove_path (exit);

  /* Transform the loop.  */
  if (transform)
    (*transform) (loop, data);

  /* Unroll the loop and remove the exits in all iterations except for the
     last one.  */
  wont_exit = sbitmap_alloc (factor);
  sbitmap_ones (wont_exit);
  RESET_BIT (wont_exit, factor - 1);

  ok = gimple_duplicate_loop_to_header_edge
          (loop, loop_latch_edge (loop), factor - 1,
           wont_exit, new_exit, &to_remove, DLTHE_FLAG_UPDATE_FREQ);
  free (wont_exit);
  gcc_assert (ok);

  for (i = 0; VEC_iterate (edge, to_remove, i, e); i++)
    {
      ok = remove_path (e);
      gcc_assert (ok);
    }
  VEC_free (edge, heap, to_remove);
  update_ssa (TODO_update_ssa);

  /* Ensure that the frequencies in the loop match the new estimated
     number of iterations, and change the probability of the new
     exit edge.  */
  freq_h = loop->header->frequency;
  freq_e = EDGE_FREQUENCY (loop_preheader_edge (loop));
  if (freq_h != 0)
    scale_loop_frequencies (loop, freq_e * (new_est_niter + 1), freq_h);

  exit_bb = single_pred (loop->latch);
  new_exit = find_edge (exit_bb, rest);
  new_exit->count = loop_preheader_edge (loop)->count;
  new_exit->probability = REG_BR_PROB_BASE / (new_est_niter + 1);

  rest->count += new_exit->count;
  rest->frequency += EDGE_FREQUENCY (new_exit);

  new_nonexit = single_pred_edge (loop->latch);
  prob = new_nonexit->probability;
  new_nonexit->probability = REG_BR_PROB_BASE - new_exit->probability;
  new_nonexit->count = exit_bb->count - new_exit->count;
  if (new_nonexit->count < 0)
    new_nonexit->count = 0;
  if (prob > 0)
    scale_bbs_frequencies_int (&loop->latch, 1, new_nonexit->probability,
                               prob);

  /* Finally create the new counter for the number of iterations and add the
     new exit instruction.  */
  bsi = gsi_last_bb (exit_bb);
  exit_if = gsi_stmt (bsi);
  create_iv (exit_base, exit_step, NULL_TREE, loop,
             &bsi, false, &ctr_before, &ctr_after);
  gimple_cond_set_code (exit_if, exit_cmp);
  gimple_cond_set_lhs (exit_if, ctr_after);
  gimple_cond_set_rhs (exit_if, exit_bound);
  update_stmt (exit_if);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
  verify_dominators (CDI_DOMINATORS);
  verify_loop_structure ();
  verify_loop_closed_ssa ();
#endif
}

/* Wrapper over tree_transform_and_unroll_loop for the case when we do not
   want to transform the loop before unrolling.  The meaning
   of the arguments is the same as for tree_transform_and_unroll_loop.  */

void
tree_unroll_loop (struct loop *loop, unsigned factor,
                  edge exit, struct tree_niter_desc *desc)
{
  tree_transform_and_unroll_loop (loop, factor, exit, desc,
                                  NULL, NULL);
}

/* Rewrite the phi node at position PSI in terms of the main
   induction variable MAIN_IV and insert the generated code at GSI.  */

static void
rewrite_phi_with_iv (loop_p loop,
                     gimple_stmt_iterator *psi,
                     gimple_stmt_iterator *gsi,
                     tree main_iv)
{
  affine_iv iv;
  gimple stmt, phi = gsi_stmt (*psi);
  tree atype, mtype, val, res = PHI_RESULT (phi);

  if (!is_gimple_reg (res) || res == main_iv)
    {
      gsi_next (psi);
      return;
    }

  if (!simple_iv (loop, loop, res, &iv, true))
    {
      gsi_next (psi);
      return;
    }

  remove_phi_node (psi, false);

  atype = TREE_TYPE (res);
  mtype = POINTER_TYPE_P (atype) ? sizetype : atype;
  val = fold_build2 (MULT_EXPR, mtype, unshare_expr (iv.step),
                     fold_convert (mtype, main_iv));
  val = fold_build2 (POINTER_TYPE_P (atype)
                     ? POINTER_PLUS_EXPR : PLUS_EXPR,
                     atype, unshare_expr (iv.base), val);
  val = force_gimple_operand_gsi (gsi, val, false, NULL_TREE, true,
                                  GSI_SAME_STMT);
  stmt = gimple_build_assign (res, val);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  SSA_NAME_DEF_STMT (res) = stmt;
}
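
/* For example, a pointer IV described by the PHI node

     p = PHI <a (preheader), p + 4 (latch)>

   whose simple_iv base is a and step is 4, given a main IV i counting from
   0 by 1, is replaced by a plain assignment near the start of the block,
   roughly

     p = a + (sizetype) i * 4;

   built with POINTER_PLUS_EXPR, so p no longer needs its own PHI node.
   (The names are illustrative only.)  */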

/* Rewrite all the phi nodes of LOOP in terms of the main induction
   variable MAIN_IV.  */

static void
rewrite_all_phi_nodes_with_iv (loop_p loop, tree main_iv)
{
  unsigned i;
  basic_block *bbs = get_loop_body_in_dom_order (loop);
  gimple_stmt_iterator psi;

  for (i = 0; i < loop->num_nodes; i++)
    {
      basic_block bb = bbs[i];
      gimple_stmt_iterator gsi = gsi_after_labels (bb);

      if (bb->loop_father != loop)
        continue;

      for (psi = gsi_start_phis (bb); !gsi_end_p (psi); )
        rewrite_phi_with_iv (loop, &psi, &gsi, main_iv);
    }

  free (bbs);
}

/* Bases all the induction variables in LOOP on a single induction
   variable (unsigned with base 0 and step 1), whose final value is
   compared with *NIT.  When the IV type precision has to be larger
   than *NIT type precision, *NIT is converted to the larger type, the
   conversion code is inserted before the loop, and *NIT is updated to
   the new definition.  The induction variable is incremented in the
   loop latch.  Return the induction variable that was created.  */

tree
canonicalize_loop_ivs (struct loop *loop, tree *nit)
{
  unsigned precision = TYPE_PRECISION (TREE_TYPE (*nit));
  unsigned original_precision = precision;
  tree type, var_before;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  edge exit = single_dom_exit (loop);
  gimple_seq stmts;

  for (psi = gsi_start_phis (loop->header);
       !gsi_end_p (psi); gsi_next (&psi))
    {
      gimple phi = gsi_stmt (psi);
      tree res = PHI_RESULT (phi);

      if (is_gimple_reg (res) && TYPE_PRECISION (TREE_TYPE (res)) > precision)
        precision = TYPE_PRECISION (TREE_TYPE (res));
    }

  type = lang_hooks.types.type_for_size (precision, 1);

  if (original_precision != precision)
    {
      *nit = fold_convert (type, *nit);
      *nit = force_gimple_operand (*nit, &stmts, true, NULL_TREE);
      if (stmts)
        gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
    }

  gsi = gsi_last_bb (loop->latch);
  create_iv (build_int_cst_type (type, 0), build_int_cst (type, 1), NULL_TREE,
             loop, &gsi, true, &var_before, NULL);

  rewrite_all_phi_nodes_with_iv (loop, var_before);

  stmt = last_stmt (exit->src);
  /* Make the loop exit if the control condition is not satisfied.  */
  if (exit->flags & EDGE_TRUE_VALUE)
    {
      edge te, fe;

      extract_true_false_edges_from_block (exit->src, &te, &fe);
      te->flags = EDGE_FALSE_VALUE;
      fe->flags = EDGE_TRUE_VALUE;
    }
  gimple_cond_set_code (stmt, LT_EXPR);
  gimple_cond_set_lhs (stmt, var_before);
  gimple_cond_set_rhs (stmt, *nit);
  update_stmt (stmt);

  return var_before;
}
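
/* For instance, a loop such as

     for (p = a; p < a + n; p++)
       ...

   is rewritten so that all of its IVs, including p, are expressed in terms
   of a single new unsigned counter (named "ivtmp" by create_iv) running
   0, 1, 2, ..., and the exit test becomes ivtmp < *NIT, leaving the loop
   when the comparison is false.  Callers such as the loop
   autoparallelization pass use this canonical form before splitting the
   loop body off into a separate function.  */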