/* Loop unswitching for GNU compiler.
   Copyright (C) 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "params.h"
#include "output.h"
#include "expr.h"
/* This pass moves constant conditions out of loops, duplicating the loop
   in progress, i.e. this code:

   while (loop_cond)
     {
       A;
       if (cond)
         branch1;
       else
         branch2;
       B;
       if (cond)
         branch3;
       C;
     }
   where nothing inside the loop alters cond is transformed
   into

   if (cond)
     {
       while (loop_cond)
         {
           A;
           branch1;
           B;
           branch3;
           C;
         }
     }
   else
     {
       while (loop_cond)
         {
           A;
           branch2;
           B;
           C;
         }
     }

   Duplicating the loop might lead to code growth exponential in the number of
   branches inside the loop, so we limit the number of unswitchings performed
   in a single loop to PARAM_MAX_UNSWITCH_LEVEL.  We only perform the
   transformation on innermost loops, as the benefit of doing it on loops
   containing subloops would not be very large compared to the complications
   of handling this case.  */
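
/* For illustration only (the names below are made up, not taken from real
   input), the source-level effect of this pass corresponds to turning

     for (i = 0; i < n; i++)
       if (flag)          /* flag is not modified inside the loop.  */
         a[i] *= 2;
       else
         a[i] += 1;

   into

     if (flag)
       for (i = 0; i < n; i++)
         a[i] *= 2;
     else
       for (i = 0; i < n; i++)
         a[i] += 1;

   so that the invariant test of FLAG is executed only once.  */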
static struct loop *unswitch_loop (struct loops *, struct loop *,
				   basic_block, rtx, rtx);
static void unswitch_single_loop (struct loops *, struct loop *, rtx, int);
static rtx may_unswitch_on (basic_block, struct loop *, rtx *);
/* Prepare a sequence comparing OP0 with OP1 using COMP and jumping to LABEL if
   true, with probability PROB.  If CINSN is not NULL, it is the insn to copy
   in order to create a jump.  */

rtx
compare_and_jump_seq (rtx op0, rtx op1, enum rtx_code comp, rtx label, int prob,
		      rtx cinsn)
{
  rtx seq, jump, cond;
  enum machine_mode mode;

  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  start_sequence ();
  if (GET_MODE_CLASS (mode) == MODE_CC)
    {
      /* A hack -- there seems to be no easy generic way to make a
	 conditional jump from a ccmode comparison.  */
      if (!cinsn)
	abort ();
      cond = XEXP (SET_SRC (pc_set (cinsn)), 0);
      if (GET_CODE (cond) != comp
	  || !rtx_equal_p (op0, XEXP (cond, 0))
	  || !rtx_equal_p (op1, XEXP (cond, 1)))
	abort ();
      emit_jump_insn (copy_insn (PATTERN (cinsn)));
      jump = get_last_insn ();
      JUMP_LABEL (jump) = JUMP_LABEL (cinsn);
      LABEL_NUSES (JUMP_LABEL (jump))++;
      redirect_jump (jump, label, 0);
    }
  else
    {
      if (cinsn)
	abort ();

      op0 = force_operand (op0, NULL_RTX);
      op1 = force_operand (op1, NULL_RTX);
      do_compare_rtx_and_jump (op0, op1, comp, 0,
			       mode, NULL_RTX, NULL_RTX, label);
      jump = get_last_insn ();
      JUMP_LABEL (jump) = label;
      LABEL_NUSES (label)++;
    }
  REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
					REG_NOTES (jump));
  seq = get_insns ();
  end_sequence ();

  return seq;
}
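
/* For example, unswitch_loop below uses compare_and_jump_seq to build the
   guard jump that selects between the two loop copies:

     seq = compare_and_jump_seq (XEXP (cond, 0), XEXP (cond, 1), GET_CODE (cond),
				 block_label (true_edge->dest), prob, cinsn);
     emit_insn_after (seq, BB_END (switch_bb));  */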
/* Main entry point.  Perform loop unswitching on all suitable LOOPS.  */
void
unswitch_loops (struct loops *loops)
{
  int i, num;
  struct loop *loop;

  /* Go through inner loops (only original ones).  */
  num = loops->num;

  for (i = 1; i < num; i++)
    {
      /* Removed loop?  */
      loop = loops->parray[i];
      if (!loop)
	continue;

      if (loop->inner)
	continue;

      unswitch_single_loop (loops, loop, NULL_RTX, 0);
#ifdef ENABLE_CHECKING
      verify_dominators (CDI_DOMINATORS);
      verify_loop_structure (loops);
#endif
    }

  iv_analysis_done ();
}
/* Checks whether we can unswitch LOOP on the condition at the end of BB -- one
   of its basic blocks (for what this means see the comments below).  In case
   the condition compares a loop invariant cc mode register, return the jump
   in CINSN.  */

static rtx
may_unswitch_on (basic_block bb, struct loop *loop, rtx *cinsn)
{
  rtx test, at, insn, op[2], stest;
  struct rtx_iv iv;
  unsigned i;
  enum machine_mode mode;

  /* BB must end in a simple conditional jump.  */
  if (EDGE_COUNT (bb->succs) != 2)
    return NULL_RTX;
  if (!any_condjump_p (BB_END (bb)))
    return NULL_RTX;

  /* Both branches of the jump must stay inside the loop.  */
  if (!flow_bb_inside_loop_p (loop, EDGE_SUCC (bb, 0)->dest)
      || !flow_bb_inside_loop_p (loop, EDGE_SUCC (bb, 1)->dest))
    return NULL_RTX;

  /* It must be executed just once each iteration (because otherwise we
     are unable to update dominator/irreducible loop information correctly).  */
  if (!just_once_each_iteration_p (loop, bb))
    return NULL_RTX;

  /* Condition must be invariant.  */
  test = get_condition (BB_END (bb), &at, true, false);
  if (!test)
    return NULL_RTX;

  for (i = 0; i < 2; i++)
    {
      op[i] = XEXP (test, i);

      if (CONSTANT_P (op[i]))
	continue;

      insn = iv_get_reaching_def (at, op[i]);
      if (!iv_analyze (insn, op[i], &iv))
	return NULL_RTX;
      if (iv.step != const0_rtx
	  || iv.first_special)
	return NULL_RTX;

      op[i] = get_iv_value (&iv, const0_rtx);
    }

  mode = GET_MODE (op[0]);
  if (mode == VOIDmode)
    mode = GET_MODE (op[1]);
  if (GET_MODE_CLASS (mode) == MODE_CC)
    {
      if (at != BB_END (bb))
	return NULL_RTX;

      *cinsn = BB_END (bb);
      if (!rtx_equal_p (op[0], XEXP (test, 0))
	  || !rtx_equal_p (op[1], XEXP (test, 1)))
	return NULL_RTX;

      return test;
    }

  stest = simplify_gen_relational (GET_CODE (test), SImode,
				   mode, op[0], op[1]);
  if (stest == const0_rtx
      || stest == const_true_rtx)
    return stest;

  return canon_condition (gen_rtx_fmt_ee (GET_CODE (test), SImode,
					  op[0], op[1]));
}
/* Reverses CONDition; returns NULL if we cannot.  */
rtx
reversed_condition (rtx cond)
{
  enum rtx_code reversed;
  reversed = reversed_comparison_code (cond, NULL);
  if (reversed == UNKNOWN)
    return NULL_RTX;
  else
    return gen_rtx_fmt_ee (reversed,
			   GET_MODE (cond), XEXP (cond, 0),
			   XEXP (cond, 1));
}
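
/* For example, reversed_condition turns (eq x 0) into (ne x 0).  When
   reversed_comparison_code cannot determine a safe reverse (it then returns
   UNKNOWN, e.g. for some floating point comparisons), NULL_RTX is returned
   and the caller simply does not use the reversed form.  */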
/* Unswitch single LOOP.  COND_CHECKED holds the list of conditions we already
   unswitched on and that are therefore known to be true in this LOOP.  NUM is
   the number of unswitchings done so far; do not allow it to grow too much,
   since it is too easy to create an example where the code would grow
   exponentially.  */
static void
unswitch_single_loop (struct loops *loops, struct loop *loop,
		      rtx cond_checked, int num)
{
  basic_block *bbs;
  struct loop *nloop;
  unsigned i;
  rtx cond, rcond = NULL_RTX, conds, rconds, acond, cinsn = NULL_RTX;
  int repeat;
  edge e;

  /* Do not unswitch too much.  */
  if (num > PARAM_VALUE (PARAM_MAX_UNSWITCH_LEVEL))
    {
      if (dump_file)
	fprintf (dump_file, ";; Not unswitching anymore, hit max level\n");
      return;
    }

  /* Only unswitch innermost loops.  */
  if (loop->inner)
    {
      if (dump_file)
	fprintf (dump_file, ";; Not unswitching, not innermost loop\n");
      return;
    }

  /* We must be able to duplicate loop body.  */
  if (!can_duplicate_loop_p (loop))
    {
      if (dump_file)
	fprintf (dump_file, ";; Not unswitching, can't duplicate loop\n");
      return;
    }

  /* The loop should not be too large, to limit code growth.  */
  if (num_loop_insns (loop) > PARAM_VALUE (PARAM_MAX_UNSWITCH_INSNS))
    {
      if (dump_file)
	fprintf (dump_file, ";; Not unswitching, loop too big\n");
      return;
    }

  /* Do not unswitch in cold areas.  */
  if (!maybe_hot_bb_p (loop->header))
    {
      if (dump_file)
	fprintf (dump_file, ";; Not unswitching, not hot area\n");
      return;
    }

  /* Nor if the loop usually does not roll.  */
  if (expected_loop_iterations (loop) < 1)
    {
      if (dump_file)
	fprintf (dump_file, ";; Not unswitching, loop iterations < 1\n");
      return;
    }

  do
    {
      repeat = 0;

      /* Find a bb to unswitch on.  */
      bbs = get_loop_body (loop);
      iv_analysis_loop_init (loop);
      for (i = 0; i < loop->num_nodes; i++)
	if ((cond = may_unswitch_on (bbs[i], loop, &cinsn)))
	  break;

      if (i == loop->num_nodes)
	{
	  free (bbs);
	  return;
	}

      if (cond != const0_rtx
	  && cond != const_true_rtx)
	{
	  rcond = reversed_condition (cond);
	  if (rcond)
	    rcond = canon_condition (rcond);

	  /* Check whether the result can be predicted.  */
	  for (acond = cond_checked; acond; acond = XEXP (acond, 1))
	    simplify_using_condition (XEXP (acond, 0), &cond, NULL);
	}

      if (cond == const_true_rtx)
	{
	  /* Remove false path.  */
	  e = FALLTHRU_EDGE (bbs[i]);
	  remove_path (loops, e);
	  free (bbs);
	  repeat = 1;
	}
      else if (cond == const0_rtx)
	{
	  /* Remove true path.  */
	  e = BRANCH_EDGE (bbs[i]);
	  remove_path (loops, e);
	  free (bbs);
	  repeat = 1;
	}
    } while (repeat);

  /* We found the condition we can unswitch on.  */
  conds = alloc_EXPR_LIST (0, cond, cond_checked);
  if (rcond)
    rconds = alloc_EXPR_LIST (0, rcond, cond_checked);
  else
    rconds = cond_checked;

  if (dump_file)
    fprintf (dump_file, ";; Unswitching loop\n");

  /* Unswitch the loop on this condition.  */
  nloop = unswitch_loop (loops, loop, bbs[i], cond, cinsn);
  if (!nloop)
    abort ();

  /* Invoke itself on modified loops.  */
  unswitch_single_loop (loops, nloop, rconds, num + 1);
  unswitch_single_loop (loops, loop, conds, num + 1);

  free_EXPR_LIST_node (conds);
  if (rcond)
    free_EXPR_LIST_node (rconds);

  free (bbs);
}
/* Unswitch a LOOP w.r.t. the given basic block UNSWITCH_ON.  We only support
   unswitching of innermost loops.  UNSWITCH_ON must be executed in every
   iteration, i.e. it must dominate the LOOP latch.  COND is the condition
   determining which loop is entered.  Returns NULL if impossible, the new
   loop otherwise.  The new loop is entered if COND is true.  If CINSN is not
   NULL, it is the insn in which COND is compared.  */

static struct loop *
unswitch_loop (struct loops *loops, struct loop *loop, basic_block unswitch_on,
	       rtx cond, rtx cinsn)
{
  edge entry, latch_edge, true_edge, false_edge, e;
  basic_block switch_bb, unswitch_on_alt, src;
  struct loop *nloop;
  sbitmap zero_bitmap;
  int irred_flag, prob;
  rtx seq;

  /* Some sanity checking.  */
  if (!flow_bb_inside_loop_p (loop, unswitch_on))
    abort ();
  if (EDGE_COUNT (unswitch_on->succs) != 2)
    abort ();
  if (!just_once_each_iteration_p (loop, unswitch_on))
    abort ();
  if (loop->inner)
    abort ();
  if (!flow_bb_inside_loop_p (loop, EDGE_SUCC (unswitch_on, 0)->dest))
    abort ();
  if (!flow_bb_inside_loop_p (loop, EDGE_SUCC (unswitch_on, 1)->dest))
    abort ();

  entry = loop_preheader_edge (loop);

  /* Make a copy.  */
  src = entry->src;
  irred_flag = entry->flags & EDGE_IRREDUCIBLE_LOOP;
  entry->flags &= ~EDGE_IRREDUCIBLE_LOOP;
  zero_bitmap = sbitmap_alloc (2);
  sbitmap_zero (zero_bitmap);
  if (!duplicate_loop_to_header_edge (loop, entry, loops, 1,
				      zero_bitmap, NULL, NULL, NULL, 0))
    return NULL;
  free (zero_bitmap);
  entry->flags |= irred_flag;

  /* Record the block with the condition we unswitch on.  */
  unswitch_on_alt = unswitch_on->rbi->copy;
  true_edge = BRANCH_EDGE (unswitch_on_alt);
  false_edge = FALLTHRU_EDGE (unswitch_on);
  latch_edge = EDGE_SUCC (loop->latch->rbi->copy, 0);

  /* Create a block with the condition.  */
  prob = true_edge->probability;
  switch_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
  seq = compare_and_jump_seq (XEXP (cond, 0), XEXP (cond, 1), GET_CODE (cond),
			      block_label (true_edge->dest),
			      prob, cinsn);
  emit_insn_after (seq, BB_END (switch_bb));
  e = make_edge (switch_bb, true_edge->dest, 0);
  e->probability = prob;
  e->count = latch_edge->count * prob / REG_BR_PROB_BASE;
  e = make_edge (switch_bb, FALLTHRU_EDGE (unswitch_on)->dest, EDGE_FALLTHRU);
  e->probability = false_edge->probability;
  e->count = latch_edge->count * (false_edge->probability) / REG_BR_PROB_BASE;

  if (irred_flag)
    {
      switch_bb->flags |= BB_IRREDUCIBLE_LOOP;
      EDGE_SUCC (switch_bb, 0)->flags |= EDGE_IRREDUCIBLE_LOOP;
      EDGE_SUCC (switch_bb, 1)->flags |= EDGE_IRREDUCIBLE_LOOP;
    }
  else
    {
      switch_bb->flags &= ~BB_IRREDUCIBLE_LOOP;
      EDGE_SUCC (switch_bb, 0)->flags &= ~EDGE_IRREDUCIBLE_LOOP;
      EDGE_SUCC (switch_bb, 1)->flags &= ~EDGE_IRREDUCIBLE_LOOP;
    }

  /* Loopify from the copy of LOOP body, constructing the new loop.  */
  nloop = loopify (loops, latch_edge,
		   EDGE_PRED (loop->header->rbi->copy, 0), switch_bb,
		   BRANCH_EDGE (switch_bb), FALLTHRU_EDGE (switch_bb), true);

  /* Remove branches that are now unreachable in the new loops.  */
  remove_path (loops, true_edge);
  remove_path (loops, false_edge);

  /* One of the created loops may no longer be a subloop of the outer loop,
     so fix its placement in the loop data structure.  */
  fix_loop_placement (loop);
  fix_loop_placement (nloop);

  /* Preserve the simple loop preheaders.  */
  loop_split_edge_with (loop_preheader_edge (loop), NULL_RTX);
  loop_split_edge_with (loop_preheader_edge (nloop), NULL_RTX);

  return nloop;
}