/* Induction variable canonicalization.
   Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This pass detects the loops that iterate a constant number of times,
   adds a canonical induction variable (step -1, tested against 0)
   and replaces the exit test.  This enables the less powerful rtl
   level analysis to use this information.

   This might spoil the code in some cases (by increasing register pressure).
   Note that in the case the new variable is not needed, ivopts will get rid
   of it, so it might only be a problem when there are no other linear induction
   variables.  In that case the created optimization possibilities are likely
   to pay up.

   Additionally in case we detect that it is beneficial to unroll the
   loop completely, we do it right here to expose the optimization
   possibilities to the following passes.  */
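
/* A sketch of the overall effect (purely illustrative; the names below are
   invented and not taken from any real testcase): a counted source loop
   such as

       for (i = 0; i != 100; i++)
         a[i] = i;

   keeps its body, but since its iteration count is known at compile time
   the exit condition is redirected to a fresh counter that starts at the
   iteration count and steps by -1 towards zero, and if the body is small
   enough the loop is instead unrolled completely right here.  */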
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "cfgloop.h"
#include "tree-pass.h"
#include "ggc.h"
#include "tree-chrec.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "flags.h"
#include "tree-inline.h"
#include "target.h"
/* Specifies types of loops that may be unrolled.  */

enum unroll_level
{
  UL_SINGLE_ITER,	/* Only loops that exit immediately in the first
			   iteration.  */
  UL_NO_GROWTH,		/* Only loops whose unrolling will not cause increase
			   in code size.  */
  UL_ALL		/* All suitable loops.  */
};
/* Adds a canonical induction variable to LOOP iterating NITER times.  EXIT
   is the exit edge whose condition is replaced.  */

static void
create_canonical_iv (struct loop *loop, edge exit, tree niter)
{
  edge in;
  tree type, var;
  gimple cond;
  gimple_stmt_iterator incr_at;
  enum tree_code cmp;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Added canonical iv to loop %d, ", loop->num);
      print_generic_expr (dump_file, niter, TDF_SLIM);
      fprintf (dump_file, " iterations.\n");
    }

  cond = last_stmt (exit->src);
  in = EDGE_SUCC (exit->src, 0);
  if (in == exit)
    in = EDGE_SUCC (exit->src, 1);

  /* Note that we do not need to worry about overflows, since
     type of niter is always unsigned and all comparisons are
     just for equality/nonequality -- i.e. everything works
     with a modulo arithmetics.  */

  type = TREE_TYPE (niter);
  niter = fold_build2 (PLUS_EXPR, type,
		       niter,
		       build_int_cst (type, 1));
  incr_at = gsi_last_bb (in->src);
  create_iv (niter,
	     build_int_cst (type, -1),
	     NULL_TREE, loop,
	     &incr_at, false, NULL, &var);

  cmp = (exit->flags & EDGE_TRUE_VALUE) ? EQ_EXPR : NE_EXPR;
  gimple_cond_set_code (cond, cmp);
  gimple_cond_set_lhs (cond, var);
  gimple_cond_set_rhs (cond, build_int_cst (type, 0));
  update_stmt (cond);
}
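
/* A sketch of what the function above builds, with invented SSA names
   (illustration only, not the output of any particular testcase): for a
   loop whose latch is known to execute 99 times it creates roughly

       # ivtmp_1 = PHI <100(preheader), ivtmp_2(latch)>
       ...
       ivtmp_2 = ivtmp_1 - 1;
       if (ivtmp_2 == 0)
         goto exit;
       else
         goto latch;

   i.e. a counter starting at NITER + 1 that is decremented on every pass
   through the exit test and reaches zero exactly when the loop should be
   left.  */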
/* Computes an estimated number of insns in LOOP, weighted by WEIGHTS.  */

unsigned
tree_num_loop_insns (struct loop *loop, eni_weights *weights)
{
  basic_block *body = get_loop_body (loop);
  gimple_stmt_iterator gsi;
  unsigned size = 0, i;

  for (i = 0; i < loop->num_nodes; i++)
    for (gsi = gsi_start_bb (body[i]); !gsi_end_p (gsi); gsi_next (&gsi))
      size += estimate_num_insns (gsi_stmt (gsi), weights);
  free (body);

  return size;
}
/* Describe size of loop as detected by tree_estimate_loop_size.  */
struct loop_size
{
  /* Number of instructions in the loop.  */
  int overall;

  /* Number of instructions that will be likely optimized out in
     peeled iterations of loop (i.e. computation based on induction
     variable where induction variable starts at known constant.)  */
  int eliminated_by_peeling;

  /* Same statistics for last iteration of loop: it is smaller because
     instructions after exit are not executed.  */
  int last_iteration;
  int last_iteration_eliminated_by_peeling;
};
/* Return true if OP in STMT will be constant after peeling LOOP.  */

static bool
constant_after_peeling (tree op, gimple stmt, struct loop *loop)
{
  affine_iv iv;

  if (is_gimple_min_invariant (op))
    return true;

  /* We can still fold accesses to constant arrays when index is known.  */
  if (TREE_CODE (op) != SSA_NAME)
    {
      tree base = op;

      /* First make fast look if we see constant array inside.  */
      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);
      if ((DECL_P (base)
	   && TREE_STATIC (base)
	   && TREE_READONLY (base)
	   && (DECL_INITIAL (base)
	       || (!DECL_EXTERNAL (base)
		   && targetm.binds_local_p (base))))
	  || CONSTANT_CLASS_P (base))
	{
	  /* If so, see if we understand all the indices.  */
	  base = op;
	  while (handled_component_p (base))
	    {
	      if (TREE_CODE (base) == ARRAY_REF
		  && !constant_after_peeling (TREE_OPERAND (base, 1), stmt, loop))
		return false;
	      base = TREE_OPERAND (base, 0);
	    }
	  return true;
	}
      return false;
    }

  /* Induction variables are constants.  */
  if (!simple_iv (loop, loop_containing_stmt (stmt), op, &iv, false))
    return false;
  if (!is_gimple_min_invariant (iv.base))
    return false;
  if (!is_gimple_min_invariant (iv.step))
    return false;
  return true;
}
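
/* For intuition (a made-up fragment): inside a loop where i_1 is a simple
   induction variable starting at a known constant, both i_1 itself and a
   read such as cst_table[i_1] from a static const array with a known
   initializer are considered constant after peeling, while a load through
   a pointer coming from outside the loop is not.  */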
/* Computes an estimated number of insns in LOOP, weighted by WEIGHTS.
   Return results in SIZE, estimate benefits for complete unrolling exiting by EXIT.  */

static void
tree_estimate_loop_size (struct loop *loop, edge exit, struct loop_size *size)
{
  basic_block *body = get_loop_body (loop);
  gimple_stmt_iterator gsi;
  unsigned int i;
  bool after_exit;

  size->overall = 0;
  size->eliminated_by_peeling = 0;
  size->last_iteration = 0;
  size->last_iteration_eliminated_by_peeling = 0;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Estimating sizes for loop %i\n", loop->num);
  for (i = 0; i < loop->num_nodes; i++)
    {
      if (exit && body[i] != exit->src
	  && dominated_by_p (CDI_DOMINATORS, body[i], exit->src))
	after_exit = true;
      else
	after_exit = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " BB: %i, after_exit: %i\n", body[i]->index, after_exit);

      for (gsi = gsi_start_bb (body[i]); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  int num = estimate_num_insns (stmt, &eni_size_weights);
	  bool likely_eliminated = false;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  size: %3i ", num);
	      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
	    }

	  /* Look for reasons why we might optimize this stmt away.  */

	  /* Exit conditional.  */
	  if (body[i] == exit->src && stmt == last_stmt (exit->src))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "   Exit condition will be eliminated.\n");
	      likely_eliminated = true;
	    }
	  /* Sets of IV variables  */
	  else if (gimple_code (stmt) == GIMPLE_ASSIGN
		   && constant_after_peeling (gimple_assign_lhs (stmt), stmt, loop))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "   Induction variable computation will"
			 " be folded away.\n");
	      likely_eliminated = true;
	    }
	  /* Assignments of IV variables.  */
	  else if (gimple_code (stmt) == GIMPLE_ASSIGN
		   && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
		   && constant_after_peeling (gimple_assign_rhs1 (stmt), stmt, loop)
		   && (gimple_assign_rhs_class (stmt) != GIMPLE_BINARY_RHS
		       || constant_after_peeling (gimple_assign_rhs2 (stmt),
						  stmt, loop)))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "   Constant expression will be folded away.\n");
	      likely_eliminated = true;
	    }
	  /* Conditionals.  */
	  else if (gimple_code (stmt) == GIMPLE_COND
		   && constant_after_peeling (gimple_cond_lhs (stmt), stmt, loop)
		   && constant_after_peeling (gimple_cond_rhs (stmt), stmt, loop))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "   Constant conditional.\n");
	      likely_eliminated = true;
	    }

	  size->overall += num;
	  if (likely_eliminated)
	    size->eliminated_by_peeling += num;
	  if (!after_exit)
	    {
	      size->last_iteration += num;
	      if (likely_eliminated)
		size->last_iteration_eliminated_by_peeling += num;
	    }
	}
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "size: %i-%i, last_iteration: %i-%i\n", size->overall,
	     size->eliminated_by_peeling, size->last_iteration,
	     size->last_iteration_eliminated_by_peeling);

  free (body);
}
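
/* For intuition, on a made-up loop body like

       i_2 = i_1 + 1;              <- IV update, likely eliminated
       tmp_3 = cst_table[i_1];     <- folds once i_1 is known
       sum_4 = sum_5 + tmp_3;      <- real work, always counted
       if (i_2 != 100) ...         <- exit test, eliminated

   only the accumulation keeps contributing to the estimated size after
   complete unrolling.  The SSA names and the array are invented for
   illustration only, assuming cst_table is a static const array with a
   known initializer and i_1 starts from a compile-time constant.  */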
/* Estimate number of insns of completely unrolled loop.
   It is (NUNROLL + 1) * size of loop body with taking into account
   the fact that in last copy everything after exit conditional
   is dead and that some instructions will be eliminated after
   peeling.

   Loop body is likely going to simplify further, this is difficult
   to guess, we just decrease the result by 1/3.  */

static unsigned HOST_WIDE_INT
estimated_unrolled_size (struct loop_size *size,
			 unsigned HOST_WIDE_INT nunroll)
{
  HOST_WIDE_INT unr_insns = ((nunroll)
			     * (HOST_WIDE_INT) (size->overall
						- size->eliminated_by_peeling));
  if (!nunroll)
    unr_insns = 0;
  unr_insns += size->last_iteration - size->last_iteration_eliminated_by_peeling;

  unr_insns = unr_insns * 2 / 3;
  if (unr_insns <= 0)
    unr_insns = 1;

  return unr_insns;
}
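
/* A worked example with invented numbers: for a body of overall size 10
   from which 4 insns are expected to be eliminated by peeling, a last
   iteration of size 6 with 4 of those eliminated, and nunroll == 3, the
   estimate is 3 * (10 - 4) + (6 - 4) = 20, scaled by 2/3 down to 13.  */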
/* Tries to unroll LOOP completely, i.e. NITER times.
   UL determines which loops we are allowed to unroll.
   EXIT is the exit of the loop that should be eliminated.  */

static bool
try_unroll_loop_completely (struct loop *loop,
			    edge exit, tree niter,
			    enum unroll_level ul)
{
  unsigned HOST_WIDE_INT n_unroll, ninsns, max_unroll, unr_insns;
  gimple cond;
  struct loop_size size;

  if (loop->inner)
    return false;

  if (!host_integerp (niter, 1))
    return false;
  n_unroll = tree_low_cst (niter, 1);

  max_unroll = PARAM_VALUE (PARAM_MAX_COMPLETELY_PEEL_TIMES);
  if (n_unroll > max_unroll)
    return false;

  if (n_unroll)
    {
      if (ul == UL_SINGLE_ITER)
	return false;

      tree_estimate_loop_size (loop, exit, &size);
      ninsns = size.overall;

      unr_insns = estimated_unrolled_size (&size, n_unroll);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  Loop size: %d\n", (int) ninsns);
	  fprintf (dump_file, "  Estimated size after unrolling: %d\n",
		   (int) unr_insns);
	}

      if (unr_insns > ninsns
	  && (unr_insns
	      > (unsigned) PARAM_VALUE (PARAM_MAX_COMPLETELY_PEELED_INSNS)))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Not unrolling loop %d "
		     "(--param max-completely-peeled-insns limit reached).\n",
		     loop->num);
	  return false;
	}

      if (ul == UL_NO_GROWTH
	  && unr_insns > ninsns)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Not unrolling loop %d.\n", loop->num);
	  return false;
	}
    }

  if (n_unroll)
    {
      sbitmap wont_exit;
      edge e;
      unsigned i;
      VEC (edge, heap) *to_remove = NULL;

      initialize_original_copy_tables ();
      wont_exit = sbitmap_alloc (n_unroll + 1);
      sbitmap_ones (wont_exit);
      RESET_BIT (wont_exit, 0);

      if (!gimple_duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
						 n_unroll, wont_exit,
						 exit, &to_remove,
						 DLTHE_FLAG_UPDATE_FREQ
						 | DLTHE_FLAG_COMPLETTE_PEEL))
	{
	  free_original_copy_tables ();
	  free (wont_exit);
	  return false;
	}

      for (i = 0; VEC_iterate (edge, to_remove, i, e); i++)
	{
	  bool ok = remove_path (e);
	  gcc_assert (ok);
	}

      VEC_free (edge, heap, to_remove);
      free (wont_exit);
      free_original_copy_tables ();
    }

  cond = last_stmt (exit->src);
  if (exit->flags & EDGE_TRUE_VALUE)
    gimple_cond_make_true (cond);
  else
    gimple_cond_make_false (cond);
  update_stmt (cond);
  update_ssa (TODO_update_ssa);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unrolled loop %d completely.\n", loop->num);

  return true;
}
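
/* To make the mechanics above concrete (the count three is only an
   example): for n_unroll == 3 the loop body is duplicated three times onto
   the preheader edge; the wont_exit bitmap (all bits set except bit zero)
   tells the duplicator which copies are known never to take the exit, the
   exit paths that thereby become unreachable are collected in TO_REMOVE
   and deleted, and the condition in the remaining exit block is finally
   folded so that the loop is always left.  */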
/* Adds a canonical induction variable to LOOP if suitable.
   CREATE_IV is true if we may create a new iv.  UL determines
   which loops we are allowed to completely unroll.  If TRY_EVAL is true, we try
   to determine the number of iterations of a loop by direct evaluation.
   Returns true if cfg is changed.  */

static bool
canonicalize_loop_induction_variables (struct loop *loop,
				       bool create_iv, enum unroll_level ul,
				       bool try_eval)
{
  edge exit = NULL;
  tree niter;

  niter = number_of_latch_executions (loop);
  if (TREE_CODE (niter) == INTEGER_CST)
    {
      exit = single_exit (loop);
      if (!just_once_each_iteration_p (loop, exit->src))
	return false;
    }
  else
    {
      /* If the loop has more than one exit, try checking all of them
	 for # of iterations determinable through scev.  */
      if (!single_exit (loop))
	niter = find_loop_niter (loop, &exit);

      /* Finally if everything else fails, try brute force evaluation.  */
      if (try_eval
	  && (chrec_contains_undetermined (niter)
	      || TREE_CODE (niter) != INTEGER_CST))
	niter = find_loop_niter_by_eval (loop, &exit);

      if (chrec_contains_undetermined (niter)
	  || TREE_CODE (niter) != INTEGER_CST)
	return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Loop %d iterates ", loop->num);
      print_generic_expr (dump_file, niter, TDF_SLIM);
      fprintf (dump_file, " times.\n");
    }

  if (try_unroll_loop_completely (loop, exit, niter, ul))
    return true;

  if (create_iv)
    create_canonical_iv (loop, exit, niter);

  return false;
}
/* The main entry point of the pass.  Adds canonical induction variables
   to the suitable loops.  */

unsigned int
canonicalize_induction_variables (void)
{
  loop_iterator li;
  struct loop *loop;
  bool changed = false;

  FOR_EACH_LOOP (li, loop, 0)
    {
      changed |= canonicalize_loop_induction_variables (loop,
							true, UL_SINGLE_ITER,
							true);
    }

  /* Clean up the information about numbers of iterations, since brute force
     evaluation could reveal new information.  */
  scev_reset ();

  if (changed)
    return TODO_cleanup_cfg;
  return 0;
}
/* Unroll LOOPS completely if they iterate just few times.  Unless
   MAY_INCREASE_SIZE is true, perform the unrolling only if the
   size of the code does not increase.  */

unsigned int
tree_unroll_loops_completely (bool may_increase_size, bool unroll_outer)
{
  loop_iterator li;
  struct loop *loop;
  bool changed;
  enum unroll_level ul;

  do
    {
      changed = false;

      FOR_EACH_LOOP (li, loop, LI_ONLY_INNERMOST)
	{
	  if (may_increase_size && optimize_loop_for_speed_p (loop)
	      /* Unroll outermost loops only if asked to do so or they do
		 not cause code growth.  */
	      && (unroll_outer
		  || loop_outer (loop_outer (loop))))
	    ul = UL_ALL;
	  else
	    ul = UL_NO_GROWTH;
	  changed |= canonicalize_loop_induction_variables
		       (loop, false, ul, !flag_tree_loop_ivcanon);
	}

      if (changed)
	{
	  /* This will take care of removing completely unrolled loops
	     from the loop structures so we can continue unrolling now
	     innermost loops.  */
	  if (cleanup_tree_cfg ())
	    update_ssa (TODO_update_ssa_only_virtuals);

	  /* Clean up the information about numbers of iterations, since
	     complete unrolling might have invalidated it.  */
	  scev_reset ();
	}
    }
  while (changed);

  return 0;
}