1 /* Induction variable canonicalization.
2 Copyright (C) 2004, 2005, 2007, 2008, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This pass detects the loops that iterate a constant number of times,
22 adds a canonical induction variable (step -1, tested against 0)
23 and replaces the exit test. This enables the less powerful rtl
24 level analysis to use this information.
26 This might spoil the code in some cases (by increasing register pressure).
27 Note that in the case the new variable is not needed, ivopts will get rid
28 of it, so it might only be a problem when there are no other linear induction
29 variables. In that case the created optimization possibilities are likely
32 Additionally in case we detect that it is beneficial to unroll the
33 loop completely, we do it right here to expose the optimization
34 possibilities to the following passes. */
38 #include "coretypes.h"
43 #include "hard-reg-set.h"
44 #include "basic-block.h"
46 #include "diagnostic.h"
47 #include "tree-flow.h"
48 #include "tree-dump.h"
50 #include "tree-pass.h"
52 #include "tree-chrec.h"
53 #include "tree-scalar-evolution.h"
56 #include "tree-inline.h"
/* Specifies types of loops that may be unrolled.  */

enum unroll_level
{
  UL_SINGLE_ITER,	/* Only loops that exit immediately in the first
			   iteration.  */
  UL_NO_GROWTH,		/* Only loops whose unrolling will not cause increase
			   in code size.  */
  UL_ALL		/* All suitable loops.  */
};
70 /* Adds a canonical induction variable to LOOP iterating NITER times. EXIT
71 is the exit edge whose condition is replaced. */
74 create_canonical_iv (struct loop
*loop
, edge exit
, tree niter
)
79 gimple_stmt_iterator incr_at
;
82 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
84 fprintf (dump_file
, "Added canonical iv to loop %d, ", loop
->num
);
85 print_generic_expr (dump_file
, niter
, TDF_SLIM
);
86 fprintf (dump_file
, " iterations.\n");
89 cond
= last_stmt (exit
->src
);
90 in
= EDGE_SUCC (exit
->src
, 0);
92 in
= EDGE_SUCC (exit
->src
, 1);
94 /* Note that we do not need to worry about overflows, since
95 type of niter is always unsigned and all comparisons are
96 just for equality/nonequality -- i.e. everything works
97 with a modulo arithmetics. */
99 type
= TREE_TYPE (niter
);
100 niter
= fold_build2 (PLUS_EXPR
, type
,
102 build_int_cst (type
, 1));
103 incr_at
= gsi_last_bb (in
->src
);
105 build_int_cst (type
, -1),
107 &incr_at
, false, NULL
, &var
);
109 cmp
= (exit
->flags
& EDGE_TRUE_VALUE
) ? EQ_EXPR
: NE_EXPR
;
110 gimple_cond_set_code (cond
, cmp
);
111 gimple_cond_set_lhs (cond
, var
);
112 gimple_cond_set_rhs (cond
, build_int_cst (type
, 0));
116 /* Computes an estimated number of insns in LOOP, weighted by WEIGHTS. */
119 tree_num_loop_insns (struct loop
*loop
, eni_weights
*weights
)
121 basic_block
*body
= get_loop_body (loop
);
122 gimple_stmt_iterator gsi
;
123 unsigned size
= 0, i
;
125 for (i
= 0; i
< loop
->num_nodes
; i
++)
126 for (gsi
= gsi_start_bb (body
[i
]); !gsi_end_p (gsi
); gsi_next (&gsi
))
127 size
+= estimate_num_insns (gsi_stmt (gsi
), weights
);
/* Describe size of loop as detected by tree_estimate_loop_size.  */

struct loop_size
{
  /* Number of instructions in the loop.  */
  int overall;

  /* Number of instructions that will be likely optimized out in
     peeled iterations of loop  (i.e. computation based on induction
     variable where induction variable starts at known constant.)  */
  int eliminated_by_peeling;

  /* Same statistics for last iteration of loop: it is smaller because
     instructions after exit are not executed.  */
  int last_iteration;
  int last_iteration_eliminated_by_peeling;
};
150 /* Return true if OP in STMT will be constant after peeling LOOP. */
153 constant_after_peeling (tree op
, gimple stmt
, struct loop
*loop
)
157 if (is_gimple_min_invariant (op
))
160 /* We can still fold accesses to constant arrays when index is known. */
161 if (TREE_CODE (op
) != SSA_NAME
)
165 /* First make fast look if we see constant array inside. */
166 while (handled_component_p (base
))
167 base
= TREE_OPERAND (base
, 0);
169 && TREE_STATIC (base
)
170 && TREE_READONLY (base
)
171 && (DECL_INITIAL (base
)
172 || (!DECL_EXTERNAL (base
)
173 && targetm
.binds_local_p (base
))))
174 || CONSTANT_CLASS_P (base
))
176 /* If so, see if we understand all the indices. */
178 while (handled_component_p (base
))
180 if (TREE_CODE (base
) == ARRAY_REF
181 && !constant_after_peeling (TREE_OPERAND (base
, 1), stmt
, loop
))
183 base
= TREE_OPERAND (base
, 0);
190 /* Induction variables are constants. */
191 if (!simple_iv (loop
, loop_containing_stmt (stmt
), op
, &iv
, false))
193 if (!is_gimple_min_invariant (iv
.base
))
195 if (!is_gimple_min_invariant (iv
.step
))
200 /* Computes an estimated number of insns in LOOP, weighted by WEIGHTS.
201 Return results in SIZE, estimate benefits for complete unrolling exiting by EXIT. */
204 tree_estimate_loop_size (struct loop
*loop
, edge exit
, struct loop_size
*size
)
206 basic_block
*body
= get_loop_body (loop
);
207 gimple_stmt_iterator gsi
;
212 size
->eliminated_by_peeling
= 0;
213 size
->last_iteration
= 0;
214 size
->last_iteration_eliminated_by_peeling
= 0;
216 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
217 fprintf (dump_file
, "Estimating sizes for loop %i\n", loop
->num
);
218 for (i
= 0; i
< loop
->num_nodes
; i
++)
220 if (exit
&& body
[i
] != exit
->src
221 && dominated_by_p (CDI_DOMINATORS
, body
[i
], exit
->src
))
225 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
226 fprintf (dump_file
, " BB: %i, after_exit: %i\n", body
[i
]->index
, after_exit
);
228 for (gsi
= gsi_start_bb (body
[i
]); !gsi_end_p (gsi
); gsi_next (&gsi
))
230 gimple stmt
= gsi_stmt (gsi
);
231 int num
= estimate_num_insns (stmt
, &eni_size_weights
);
232 bool likely_eliminated
= false;
234 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
236 fprintf (dump_file
, " size: %3i ", num
);
237 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0, 0);
240 /* Look for reasons why we might optimize this stmt away. */
242 /* Exit conditional. */
243 if (body
[i
] == exit
->src
&& stmt
== last_stmt (exit
->src
))
245 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
246 fprintf (dump_file
, " Exit condition will be eliminated.\n");
247 likely_eliminated
= true;
249 /* Sets of IV variables */
250 else if (gimple_code (stmt
) == GIMPLE_ASSIGN
251 && constant_after_peeling (gimple_assign_lhs (stmt
), stmt
, loop
))
253 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
254 fprintf (dump_file
, " Induction variable computation will"
255 " be folded away.\n");
256 likely_eliminated
= true;
258 /* Assignments of IV variables. */
259 else if (gimple_code (stmt
) == GIMPLE_ASSIGN
260 && TREE_CODE (gimple_assign_lhs (stmt
)) == SSA_NAME
261 && constant_after_peeling (gimple_assign_rhs1 (stmt
), stmt
,loop
)
262 && (gimple_assign_rhs_class (stmt
) != GIMPLE_BINARY_RHS
263 || constant_after_peeling (gimple_assign_rhs2 (stmt
),
266 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
267 fprintf (dump_file
, " Constant expression will be folded away.\n");
268 likely_eliminated
= true;
271 else if (gimple_code (stmt
) == GIMPLE_COND
272 && constant_after_peeling (gimple_cond_lhs (stmt
), stmt
, loop
)
273 && constant_after_peeling (gimple_cond_rhs (stmt
), stmt
, loop
))
275 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
276 fprintf (dump_file
, " Constant conditional.\n");
277 likely_eliminated
= true;
280 size
->overall
+= num
;
281 if (likely_eliminated
)
282 size
->eliminated_by_peeling
+= num
;
285 size
->last_iteration
+= num
;
286 if (likely_eliminated
)
287 size
->last_iteration_eliminated_by_peeling
+= num
;
291 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
292 fprintf (dump_file
, "size: %i-%i, last_iteration: %i-%i\n", size
->overall
,
293 size
->eliminated_by_peeling
, size
->last_iteration
,
294 size
->last_iteration_eliminated_by_peeling
);
299 /* Estimate number of insns of completely unrolled loop.
300 It is (NUNROLL + 1) * size of loop body with taking into account
301 the fact that in last copy everything after exit conditional
302 is dead and that some instructions will be eliminated after
305 Loop body is likely going to simplify futher, this is difficult
306 to guess, we just decrease the result by 1/3. */
308 static unsigned HOST_WIDE_INT
309 estimated_unrolled_size (struct loop_size
*size
,
310 unsigned HOST_WIDE_INT nunroll
)
312 HOST_WIDE_INT unr_insns
= ((nunroll
)
313 * (HOST_WIDE_INT
) (size
->overall
314 - size
->eliminated_by_peeling
));
317 unr_insns
+= size
->last_iteration
- size
->last_iteration_eliminated_by_peeling
;
319 unr_insns
= unr_insns
* 2 / 3;
326 /* Tries to unroll LOOP completely, i.e. NITER times.
327 UL determines which loops we are allowed to unroll.
328 EXIT is the exit of the loop that should be eliminated. */
331 try_unroll_loop_completely (struct loop
*loop
,
332 edge exit
, tree niter
,
333 enum unroll_level ul
)
335 unsigned HOST_WIDE_INT n_unroll
, ninsns
, max_unroll
, unr_insns
;
337 struct loop_size size
;
342 if (!host_integerp (niter
, 1))
344 n_unroll
= tree_low_cst (niter
, 1);
346 max_unroll
= PARAM_VALUE (PARAM_MAX_COMPLETELY_PEEL_TIMES
);
347 if (n_unroll
> max_unroll
)
352 if (ul
== UL_SINGLE_ITER
)
355 tree_estimate_loop_size (loop
, exit
, &size
);
356 ninsns
= size
.overall
;
358 unr_insns
= estimated_unrolled_size (&size
, n_unroll
);
359 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
361 fprintf (dump_file
, " Loop size: %d\n", (int) ninsns
);
362 fprintf (dump_file
, " Estimated size after unrolling: %d\n",
366 if (unr_insns
> ninsns
368 > (unsigned) PARAM_VALUE (PARAM_MAX_COMPLETELY_PEELED_INSNS
)))
370 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
371 fprintf (dump_file
, "Not unrolling loop %d "
372 "(--param max-completely-peeled-insns limit reached).\n",
377 if (ul
== UL_NO_GROWTH
378 && unr_insns
> ninsns
)
380 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
381 fprintf (dump_file
, "Not unrolling loop %d.\n", loop
->num
);
391 VEC (edge
, heap
) *to_remove
= NULL
;
393 initialize_original_copy_tables ();
394 wont_exit
= sbitmap_alloc (n_unroll
+ 1);
395 sbitmap_ones (wont_exit
);
396 RESET_BIT (wont_exit
, 0);
398 if (!gimple_duplicate_loop_to_header_edge (loop
, loop_preheader_edge (loop
),
401 DLTHE_FLAG_UPDATE_FREQ
402 | DLTHE_FLAG_COMPLETTE_PEEL
))
404 free_original_copy_tables ();
409 for (i
= 0; VEC_iterate (edge
, to_remove
, i
, e
); i
++)
411 bool ok
= remove_path (e
);
415 VEC_free (edge
, heap
, to_remove
);
417 free_original_copy_tables ();
420 cond
= last_stmt (exit
->src
);
421 if (exit
->flags
& EDGE_TRUE_VALUE
)
422 gimple_cond_make_true (cond
);
424 gimple_cond_make_false (cond
);
426 update_ssa (TODO_update_ssa
);
428 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
429 fprintf (dump_file
, "Unrolled loop %d completely.\n", loop
->num
);
434 /* Adds a canonical induction variable to LOOP if suitable.
435 CREATE_IV is true if we may create a new iv. UL determines
436 which loops we are allowed to completely unroll. If TRY_EVAL is true, we try
437 to determine the number of iterations of a loop by direct evaluation.
438 Returns true if cfg is changed. */
441 canonicalize_loop_induction_variables (struct loop
*loop
,
442 bool create_iv
, enum unroll_level ul
,
448 niter
= number_of_latch_executions (loop
);
449 if (TREE_CODE (niter
) == INTEGER_CST
)
451 exit
= single_exit (loop
);
452 if (!just_once_each_iteration_p (loop
, exit
->src
))
457 /* If the loop has more than one exit, try checking all of them
458 for # of iterations determinable through scev. */
459 if (!single_exit (loop
))
460 niter
= find_loop_niter (loop
, &exit
);
462 /* Finally if everything else fails, try brute force evaluation. */
464 && (chrec_contains_undetermined (niter
)
465 || TREE_CODE (niter
) != INTEGER_CST
))
466 niter
= find_loop_niter_by_eval (loop
, &exit
);
468 if (chrec_contains_undetermined (niter
)
469 || TREE_CODE (niter
) != INTEGER_CST
)
473 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
475 fprintf (dump_file
, "Loop %d iterates ", loop
->num
);
476 print_generic_expr (dump_file
, niter
, TDF_SLIM
);
477 fprintf (dump_file
, " times.\n");
480 if (try_unroll_loop_completely (loop
, exit
, niter
, ul
))
484 create_canonical_iv (loop
, exit
, niter
);
489 /* The main entry point of the pass. Adds canonical induction variables
490 to the suitable loops. */
493 canonicalize_induction_variables (void)
497 bool changed
= false;
499 FOR_EACH_LOOP (li
, loop
, 0)
501 changed
|= canonicalize_loop_induction_variables (loop
,
502 true, UL_SINGLE_ITER
,
506 /* Clean up the information about numbers of iterations, since brute force
507 evaluation could reveal new information. */
511 return TODO_cleanup_cfg
;
515 /* Unroll LOOPS completely if they iterate just few times. Unless
516 MAY_INCREASE_SIZE is true, perform the unrolling only if the
517 size of the code does not increase. */
520 tree_unroll_loops_completely (bool may_increase_size
, bool unroll_outer
)
525 enum unroll_level ul
;
532 FOR_EACH_LOOP (li
, loop
, LI_ONLY_INNERMOST
)
534 if (may_increase_size
&& optimize_loop_for_speed_p (loop
)
535 /* Unroll outermost loops only if asked to do so or they do
536 not cause code growth. */
538 || loop_outer (loop_outer (loop
))))
542 changed
|= canonicalize_loop_induction_variables
543 (loop
, false, ul
, !flag_tree_loop_ivcanon
);
548 /* This will take care of removing completely unrolled loops
549 from the loop structures so we can continue unrolling now
551 if (cleanup_tree_cfg ())
552 update_ssa (TODO_update_ssa_only_virtuals
);
554 /* Clean up the information about numbers of iterations, since
555 complete unrolling might have invalidated it. */
560 && ++iteration
<= PARAM_VALUE (PARAM_MAX_UNROLL_ITERATIONS
));