/* gcc/loop-unswitch.c -- extracted from official-gcc.git,
   blob 4a8865433e42fbb33e3f22bf8d2ee508d9087b4a.  */
/* Loop unswitching for GNU compiler.
   Copyright (C) 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "hard-reg-set.h"
27 #include "obstack.h"
28 #include "basic-block.h"
29 #include "cfgloop.h"
30 #include "cfglayout.h"
31 #include "params.h"
32 #include "output.h"
33 #include "expr.h"
/* This pass moves constant conditions out of loops, duplicating the loop
   in progress, i.e. this code:

   while (loop_cond)
     {
       if (cond)
         branch1;
       else
         branch2;
       if (cond)
         branch3;
     }

   where nothing inside the loop alters cond is transformed
   into

   if (cond)
     {
       while (loop_cond)
         {
           branch1;
           branch3;
         }
     }
   else
     {
       while (loop_cond)
         branch2;
     }

   Duplicating the loop might lead to code growth exponential in number of
   branches inside loop, so we limit the number of unswitchings performed
   in a single loop to PARAM_MAX_UNSWITCH_LEVEL.  We only perform the
   transformation on innermost loops, as the benefit of doing it on loops
   containing subloops would not be very large compared to complications
   with handling this case.  */
82 static struct loop *unswitch_loop (struct loops *, struct loop *,
83 basic_block, rtx, rtx);
84 static void unswitch_single_loop (struct loops *, struct loop *, rtx, int);
85 static rtx may_unswitch_on (basic_block, struct loop *, rtx *);
87 /* Prepare a sequence comparing OP0 with OP1 using COMP and jumping to LABEL if
88 true, with probability PROB. If CINSN is not NULL, it is the insn to copy
89 in order to create a jump. */
91 rtx
92 compare_and_jump_seq (rtx op0, rtx op1, enum rtx_code comp, rtx label, int prob,
93 rtx cinsn)
95 rtx seq, jump, cond;
96 enum machine_mode mode;
98 mode = GET_MODE (op0);
99 if (mode == VOIDmode)
100 mode = GET_MODE (op1);
102 start_sequence ();
103 if (GET_MODE_CLASS (mode) == MODE_CC)
105 /* A hack -- there seems to be no easy generic way how to make a
106 conditional jump from a ccmode comparison. */
107 if (!cinsn)
108 abort ();
109 cond = XEXP (SET_SRC (pc_set (cinsn)), 0);
110 if (GET_CODE (cond) != comp
111 || !rtx_equal_p (op0, XEXP (cond, 0))
112 || !rtx_equal_p (op1, XEXP (cond, 1)))
113 abort ();
114 emit_jump_insn (copy_insn (PATTERN (cinsn)));
115 jump = get_last_insn ();
116 JUMP_LABEL (jump) = JUMP_LABEL (cinsn);
117 LABEL_NUSES (JUMP_LABEL (jump))++;
118 redirect_jump (jump, label, 0);
120 else
122 if (cinsn)
123 abort ();
125 op0 = force_operand (op0, NULL_RTX);
126 op1 = force_operand (op1, NULL_RTX);
127 do_compare_rtx_and_jump (op0, op1, comp, 0,
128 mode, NULL_RTX, NULL_RTX, label);
129 jump = get_last_insn ();
130 JUMP_LABEL (jump) = label;
131 LABEL_NUSES (label)++;
133 REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
134 REG_NOTES (jump));
135 seq = get_insns ();
136 end_sequence ();
138 return seq;
141 /* Main entry point. Perform loop unswitching on all suitable LOOPS. */
142 void
143 unswitch_loops (struct loops *loops)
145 int i, num;
146 struct loop *loop;
148 /* Go through inner loops (only original ones). */
149 num = loops->num;
151 for (i = 1; i < num; i++)
153 /* Removed loop? */
154 loop = loops->parray[i];
155 if (!loop)
156 continue;
158 if (loop->inner)
159 continue;
161 unswitch_single_loop (loops, loop, NULL_RTX, 0);
162 #ifdef ENABLE_CHECKING
163 verify_dominators (CDI_DOMINATORS);
164 verify_loop_structure (loops);
165 #endif
168 iv_analysis_done ();
171 /* Checks whether we can unswitch LOOP on condition at end of BB -- one of its
172 basic blocks (for what it means see comments below). In case condition
173 compares loop invariant cc mode register, return the jump in CINSN. */
175 static rtx
176 may_unswitch_on (basic_block bb, struct loop *loop, rtx *cinsn)
178 rtx test, at, insn, op[2], stest;
179 struct rtx_iv iv;
180 unsigned i;
181 enum machine_mode mode;
183 /* BB must end in a simple conditional jump. */
184 if (EDGE_COUNT (bb->succs) != 2)
185 return NULL_RTX;
186 if (!any_condjump_p (BB_END (bb)))
187 return NULL_RTX;
189 /* With branches inside loop. */
190 if (!flow_bb_inside_loop_p (loop, EDGE_SUCC (bb, 0)->dest)
191 || !flow_bb_inside_loop_p (loop, EDGE_SUCC (bb, 1)->dest))
192 return NULL_RTX;
194 /* It must be executed just once each iteration (because otherwise we
195 are unable to update dominator/irreducible loop information correctly). */
196 if (!just_once_each_iteration_p (loop, bb))
197 return NULL_RTX;
199 /* Condition must be invariant. */
200 test = get_condition (BB_END (bb), &at, true, false);
201 if (!test)
202 return NULL_RTX;
204 for (i = 0; i < 2; i++)
206 op[i] = XEXP (test, i);
208 if (CONSTANT_P (op[i]))
209 continue;
211 insn = iv_get_reaching_def (at, op[i]);
212 if (!iv_analyze (insn, op[i], &iv))
213 return NULL_RTX;
214 if (iv.step != const0_rtx
215 || iv.first_special)
216 return NULL_RTX;
218 op[i] = get_iv_value (&iv, const0_rtx);
221 mode = GET_MODE (op[0]);
222 if (mode == VOIDmode)
223 mode = GET_MODE (op[1]);
224 if (GET_MODE_CLASS (mode) == MODE_CC)
226 if (at != BB_END (bb))
227 return NULL_RTX;
229 if (!rtx_equal_p (op[0], XEXP (test, 0))
230 || !rtx_equal_p (op[1], XEXP (test, 1)))
231 return NULL_RTX;
233 *cinsn = BB_END (bb);
234 return test;
237 stest = simplify_gen_relational (GET_CODE (test), SImode,
238 mode, op[0], op[1]);
239 if (stest == const0_rtx
240 || stest == const_true_rtx)
241 return stest;
243 return canon_condition (gen_rtx_fmt_ee (GET_CODE (test), SImode,
244 op[0], op[1]));
247 /* Reverses CONDition; returns NULL if we cannot. */
249 reversed_condition (rtx cond)
251 enum rtx_code reversed;
252 reversed = reversed_comparison_code (cond, NULL);
253 if (reversed == UNKNOWN)
254 return NULL_RTX;
255 else
256 return gen_rtx_fmt_ee (reversed,
257 GET_MODE (cond), XEXP (cond, 0),
258 XEXP (cond, 1));
261 /* Unswitch single LOOP. COND_CHECKED holds list of conditions we already
262 unswitched on and are therefore known to be true in this LOOP. NUM is
263 number of unswitchings done; do not allow it to grow too much, it is too
264 easy to create example on that the code would grow exponentially. */
265 static void
266 unswitch_single_loop (struct loops *loops, struct loop *loop,
267 rtx cond_checked, int num)
269 basic_block *bbs;
270 struct loop *nloop;
271 unsigned i;
272 rtx cond, rcond = NULL_RTX, conds, rconds, acond, cinsn;
273 int repeat;
274 edge e;
276 /* Do not unswitch too much. */
277 if (num > PARAM_VALUE (PARAM_MAX_UNSWITCH_LEVEL))
279 if (dump_file)
280 fprintf (dump_file, ";; Not unswitching anymore, hit max level\n");
281 return;
284 /* Only unswitch innermost loops. */
285 if (loop->inner)
287 if (dump_file)
288 fprintf (dump_file, ";; Not unswitching, not innermost loop\n");
289 return;
292 /* We must be able to duplicate loop body. */
293 if (!can_duplicate_loop_p (loop))
295 if (dump_file)
296 fprintf (dump_file, ";; Not unswitching, can't duplicate loop\n");
297 return;
300 /* The loop should not be too large, to limit code growth. */
301 if (num_loop_insns (loop) > PARAM_VALUE (PARAM_MAX_UNSWITCH_INSNS))
303 if (dump_file)
304 fprintf (dump_file, ";; Not unswitching, loop too big\n");
305 return;
308 /* Do not unswitch in cold areas. */
309 if (!maybe_hot_bb_p (loop->header))
311 if (dump_file)
312 fprintf (dump_file, ";; Not unswitching, not hot area\n");
313 return;
316 /* Nor if the loop usually does not roll. */
317 if (expected_loop_iterations (loop) < 1)
319 if (dump_file)
320 fprintf (dump_file, ";; Not unswitching, loop iterations < 1\n");
321 return;
326 repeat = 0;
327 cinsn = NULL_RTX;
329 /* Find a bb to unswitch on. */
330 bbs = get_loop_body (loop);
331 iv_analysis_loop_init (loop);
332 for (i = 0; i < loop->num_nodes; i++)
333 if ((cond = may_unswitch_on (bbs[i], loop, &cinsn)))
334 break;
336 if (i == loop->num_nodes)
338 free (bbs);
339 return;
342 if (cond != const0_rtx
343 && cond != const_true_rtx)
345 rcond = reversed_condition (cond);
346 if (rcond)
347 rcond = canon_condition (rcond);
349 /* Check whether the result can be predicted. */
350 for (acond = cond_checked; acond; acond = XEXP (acond, 1))
351 simplify_using_condition (XEXP (acond, 0), &cond, NULL);
354 if (cond == const_true_rtx)
356 /* Remove false path. */
357 e = FALLTHRU_EDGE (bbs[i]);
358 remove_path (loops, e);
359 free (bbs);
360 repeat = 1;
362 else if (cond == const0_rtx)
364 /* Remove true path. */
365 e = BRANCH_EDGE (bbs[i]);
366 remove_path (loops, e);
367 free (bbs);
368 repeat = 1;
370 } while (repeat);
372 /* We found the condition we can unswitch on. */
373 conds = alloc_EXPR_LIST (0, cond, cond_checked);
374 if (rcond)
375 rconds = alloc_EXPR_LIST (0, rcond, cond_checked);
376 else
377 rconds = cond_checked;
379 if (dump_file)
380 fprintf (dump_file, ";; Unswitching loop\n");
382 /* Unswitch the loop on this condition. */
383 nloop = unswitch_loop (loops, loop, bbs[i], cond, cinsn);
384 if (!nloop)
385 abort ();
387 /* Invoke itself on modified loops. */
388 unswitch_single_loop (loops, nloop, rconds, num + 1);
389 unswitch_single_loop (loops, loop, conds, num + 1);
391 free_EXPR_LIST_node (conds);
392 if (rcond)
393 free_EXPR_LIST_node (rconds);
395 free (bbs);
398 /* Unswitch a LOOP w.r. to given basic block UNSWITCH_ON. We only support
399 unswitching of innermost loops. UNSWITCH_ON must be executed in every
400 iteration, i.e. it must dominate LOOP latch. COND is the condition
401 determining which loop is entered. Returns NULL if impossible, new loop
402 otherwise. The new loop is entered if COND is true. If CINSN is not
403 NULL, it is the insn in that COND is compared. */
405 static struct loop *
406 unswitch_loop (struct loops *loops, struct loop *loop, basic_block unswitch_on,
407 rtx cond, rtx cinsn)
409 edge entry, latch_edge, true_edge, false_edge, e;
410 basic_block switch_bb, unswitch_on_alt, src;
411 struct loop *nloop;
412 sbitmap zero_bitmap;
413 int irred_flag, prob;
414 rtx seq;
416 /* Some sanity checking. */
417 if (!flow_bb_inside_loop_p (loop, unswitch_on))
418 abort ();
419 if (EDGE_COUNT (unswitch_on->succs) != 2)
420 abort ();
421 if (!just_once_each_iteration_p (loop, unswitch_on))
422 abort ();
423 if (loop->inner)
424 abort ();
425 if (!flow_bb_inside_loop_p (loop, EDGE_SUCC (unswitch_on, 0)->dest))
426 abort ();
427 if (!flow_bb_inside_loop_p (loop, EDGE_SUCC (unswitch_on, 1)->dest))
428 abort ();
430 entry = loop_preheader_edge (loop);
432 /* Make a copy. */
433 src = entry->src;
434 irred_flag = entry->flags & EDGE_IRREDUCIBLE_LOOP;
435 entry->flags &= ~EDGE_IRREDUCIBLE_LOOP;
436 zero_bitmap = sbitmap_alloc (2);
437 sbitmap_zero (zero_bitmap);
438 if (!duplicate_loop_to_header_edge (loop, entry, loops, 1,
439 zero_bitmap, NULL, NULL, NULL, 0))
440 return NULL;
441 free (zero_bitmap);
442 entry->flags |= irred_flag;
444 /* Record the block with condition we unswitch on. */
445 unswitch_on_alt = unswitch_on->rbi->copy;
446 true_edge = BRANCH_EDGE (unswitch_on_alt);
447 false_edge = FALLTHRU_EDGE (unswitch_on);
448 latch_edge = EDGE_SUCC (loop->latch->rbi->copy, 0);
450 /* Create a block with the condition. */
451 prob = true_edge->probability;
452 switch_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
453 seq = compare_and_jump_seq (XEXP (cond, 0), XEXP (cond, 1), GET_CODE (cond),
454 block_label (true_edge->dest),
455 prob, cinsn);
456 emit_insn_after (seq, BB_END (switch_bb));
457 e = make_edge (switch_bb, true_edge->dest, 0);
458 e->probability = prob;
459 e->count = latch_edge->count * prob / REG_BR_PROB_BASE;
460 e = make_edge (switch_bb, FALLTHRU_EDGE (unswitch_on)->dest, EDGE_FALLTHRU);
461 e->probability = false_edge->probability;
462 e->count = latch_edge->count * (false_edge->probability) / REG_BR_PROB_BASE;
464 if (irred_flag)
466 switch_bb->flags |= BB_IRREDUCIBLE_LOOP;
467 EDGE_SUCC (switch_bb, 0)->flags |= EDGE_IRREDUCIBLE_LOOP;
468 EDGE_SUCC (switch_bb, 1)->flags |= EDGE_IRREDUCIBLE_LOOP;
470 else
472 switch_bb->flags &= ~BB_IRREDUCIBLE_LOOP;
473 EDGE_SUCC (switch_bb, 0)->flags &= ~EDGE_IRREDUCIBLE_LOOP;
474 EDGE_SUCC (switch_bb, 1)->flags &= ~EDGE_IRREDUCIBLE_LOOP;
477 /* Loopify from the copy of LOOP body, constructing the new loop. */
478 nloop = loopify (loops, latch_edge,
479 EDGE_PRED (loop->header->rbi->copy, 0), switch_bb,
480 BRANCH_EDGE (switch_bb), FALLTHRU_EDGE (switch_bb), true);
482 /* Remove branches that are now unreachable in new loops. */
483 remove_path (loops, true_edge);
484 remove_path (loops, false_edge);
486 /* One of created loops do not have to be subloop of the outer loop now,
487 so fix its placement in loop data structure. */
488 fix_loop_placement (loop);
489 fix_loop_placement (nloop);
491 /* Preserve the simple loop preheaders. */
492 loop_split_edge_with (loop_preheader_edge (loop), NULL_RTX);
493 loop_split_edge_with (loop_preheader_edge (nloop), NULL_RTX);
495 return nloop;