/* Loop unswitching for GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "params.h"
#include "output.h"
#include "expr.h"
/* This pass moves constant conditions out of loops, duplicating the loop
   in progress: a loop containing a condition that nothing inside the loop
   alters is transformed into two copies of the loop, each guarded by one
   outcome of that condition (see the sketch below).

   Duplicating the loop might lead to code growth exponential in the number
   of branches inside the loop, so we limit the number of unswitchings
   performed in a single loop to PARAM_MAX_UNSWITCH_LEVEL.  We only perform
   the transformation on innermost loops, as the benefit of doing it on loops
   containing subloops would not be very large compared to the complications
   of handling this case.  */
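
/* An illustrative sketch of the transformation; the statement names A, B,
   branch1 and branch2 below are placeholders only, not taken from any real
   test case:

     while (loop_cond)                    if (cond)
       {                                    while (loop_cond)
         A;                                   { A; branch1; B; }
         if (cond)                  ==>     else
           branch1;                          while (loop_cond)
         else                                  { A; branch2; B; }
           branch2;
         B;
       }

   Nothing inside the loop may alter COND, so each copy of the loop keeps
   only the arm of the branch that its guard makes reachable.  */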
static struct loop *unswitch_loop (struct loops *, struct loop *,
                                   basic_block, rtx, rtx);
static void unswitch_single_loop (struct loops *, struct loop *, rtx, int);
static rtx may_unswitch_on (basic_block, struct loop *, rtx *);
/* Prepare a sequence comparing OP0 with OP1 using COMP and jumping to LABEL
   if true, with probability PROB.  If CINSN is not NULL, it is the insn to
   copy in order to create the jump.  */

static rtx
compare_and_jump_seq (rtx op0, rtx op1, enum rtx_code comp, rtx label, int prob,
                      rtx cinsn)
{
  rtx seq, jump, cond;
  enum machine_mode mode;

  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  start_sequence ();
  if (GET_MODE_CLASS (mode) == MODE_CC)
    {
      /* A hack -- there seems to be no easy generic way to make a
         conditional jump from a ccmode comparison.  */
      gcc_assert (cinsn);
      cond = XEXP (SET_SRC (pc_set (cinsn)), 0);
      gcc_assert (GET_CODE (cond) == comp);
      gcc_assert (rtx_equal_p (op0, XEXP (cond, 0)));
      gcc_assert (rtx_equal_p (op1, XEXP (cond, 1)));
      emit_jump_insn (copy_insn (PATTERN (cinsn)));
      jump = get_last_insn ();
      JUMP_LABEL (jump) = JUMP_LABEL (cinsn);
      LABEL_NUSES (JUMP_LABEL (jump))++;
      redirect_jump (jump, label, 0);
    }
  else
    {
      gcc_assert (!cinsn);

      op0 = force_operand (op0, NULL_RTX);
      op1 = force_operand (op1, NULL_RTX);
      do_compare_rtx_and_jump (op0, op1, comp, 0,
                               mode, NULL_RTX, NULL_RTX, label);
      jump = get_last_insn ();
      JUMP_LABEL (jump) = label;
      LABEL_NUSES (label)++;
    }

  /* Attach the branch probability note so later passes keep the profile
     consistent for the new jump.  */
  REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
                                        REG_NOTES (jump));

  seq = get_insns ();
  end_sequence ();

  return seq;
}
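
/* Purely for illustration (this exact call does not appear anywhere in GCC):
   emitting a jump to LABEL taken when pseudo REG is zero, with a probability
   of roughly 90%, could look like

     seq = compare_and_jump_seq (reg, const0_rtx, EQ, label,
                                 REG_BR_PROB_BASE * 9 / 10, NULL_RTX);
     emit_insn_after (seq, BB_END (bb));

   which is the pattern unswitch_loop below follows when it builds the block
   holding the unswitched condition.  */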
/* Main entry point.  Perform loop unswitching on all suitable LOOPS.  */
void
unswitch_loops (struct loops *loops)
{
  int i, num;
  struct loop *loop;

  /* Go through inner loops (only original ones).  */
  num = loops->num;

  for (i = 1; i < num; i++)
    {
      /* Removed loop?  */
      loop = loops->parray[i];
      if (!loop)
        continue;

      if (loop->inner)
        continue;

      unswitch_single_loop (loops, loop, NULL_RTX, 0);
#ifdef ENABLE_CHECKING
      verify_dominators (CDI_DOMINATORS);
      verify_loop_structure (loops);
#endif
    }

  iv_analysis_done ();
}
/* Checks whether we can unswitch LOOP on the condition at the end of BB --
   one of its basic blocks (for what this means see the comments below).  If
   the condition compares a loop-invariant cc-mode register, return the jump
   in CINSN.  */

static rtx
may_unswitch_on (basic_block bb, struct loop *loop, rtx *cinsn)
{
  rtx test, at, op[2], stest;
  struct rtx_iv iv;
  unsigned i;
  enum machine_mode mode;

  /* BB must end in a simple conditional jump.  */
  if (EDGE_COUNT (bb->succs) != 2)
    return NULL_RTX;
  if (!any_condjump_p (BB_END (bb)))
    return NULL_RTX;

  /* Both branches must stay inside the loop.  */
  if (!flow_bb_inside_loop_p (loop, EDGE_SUCC (bb, 0)->dest)
      || !flow_bb_inside_loop_p (loop, EDGE_SUCC (bb, 1)->dest))
    return NULL_RTX;

  /* It must be executed just once each iteration (because otherwise we
     are unable to update dominator/irreducible loop information correctly).  */
  if (!just_once_each_iteration_p (loop, bb))
    return NULL_RTX;

  /* The condition must be invariant.  */
  test = get_condition (BB_END (bb), &at, true, false);
  if (!test)
    return NULL_RTX;

  for (i = 0; i < 2; i++)
    {
      op[i] = XEXP (test, i);

      if (CONSTANT_P (op[i]))
        continue;

      if (!iv_analyze (at, op[i], &iv))
        return NULL_RTX;
      if (iv.step != const0_rtx
          || iv.first_special)
        return NULL_RTX;

      op[i] = get_iv_value (&iv, const0_rtx);
    }

  mode = GET_MODE (op[0]);
  if (mode == VOIDmode)
    mode = GET_MODE (op[1]);
  if (GET_MODE_CLASS (mode) == MODE_CC)
    {
      if (at != BB_END (bb))
        return NULL_RTX;

      if (!rtx_equal_p (op[0], XEXP (test, 0))
          || !rtx_equal_p (op[1], XEXP (test, 1)))
        return NULL_RTX;

      *cinsn = BB_END (bb);
      return test;
    }

  stest = simplify_gen_relational (GET_CODE (test), SImode,
                                   mode, op[0], op[1]);
  if (stest == const0_rtx
      || stest == const_true_rtx)
    return stest;

  return canon_condition (gen_rtx_fmt_ee (GET_CODE (test), SImode,
                                          op[0], op[1]));
}
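
/* An informal example of what the test above accepts: a jump ending BB on
   (ne (reg R) (const_int 0)) qualifies when R is never modified inside LOOP;
   iv_analyze then reports a zero step and the comparison is rewritten in
   terms of the invariant value.  A register that changes every iteration has
   a nonzero step and the block is rejected.  */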
/* Reverses CONDition; returns NULL if we cannot.  */
static rtx
reversed_condition (rtx cond)
{
  enum rtx_code reversed;

  reversed = reversed_comparison_code (cond, NULL);
  if (reversed == UNKNOWN)
    return NULL_RTX;

  return gen_rtx_fmt_ee (reversed,
                         GET_MODE (cond), XEXP (cond, 0),
                         XEXP (cond, 1));
}
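
/* For instance, reversing (eq x y) yields (ne x y).  reversed_comparison_code
   returns UNKNOWN -- and we give up -- when no single reversed code exists,
   e.g. for floating-point comparisons whose reversal would change how NaNs
   are treated.  */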
/* Unswitch single LOOP.  COND_CHECKED holds the list of conditions we have
   already unswitched on and that are therefore known to be true in this
   LOOP.  NUM is the number of unswitchings already done; do not allow it to
   grow too much, since it is too easy to construct an example on which the
   code would grow exponentially.  */
static void
unswitch_single_loop (struct loops *loops, struct loop *loop,
                      rtx cond_checked, int num)
{
  basic_block *bbs;
  struct loop *nloop;
  unsigned i;
  rtx cond, rcond = NULL_RTX, conds, rconds, acond, cinsn;
  int repeat;
  edge e;

  /* Do not unswitch too much.  */
  if (num > PARAM_VALUE (PARAM_MAX_UNSWITCH_LEVEL))
    {
      if (dump_file)
        fprintf (dump_file, ";; Not unswitching anymore, hit max level\n");
      return;
    }

  /* Only unswitch innermost loops.  */
  if (loop->inner)
    {
      if (dump_file)
        fprintf (dump_file, ";; Not unswitching, not innermost loop\n");
      return;
    }

  /* We must be able to duplicate the loop body.  */
  if (!can_duplicate_loop_p (loop))
    {
      if (dump_file)
        fprintf (dump_file, ";; Not unswitching, can't duplicate loop\n");
      return;
    }

  /* The loop should not be too large, to limit code growth.  */
  if (num_loop_insns (loop) > PARAM_VALUE (PARAM_MAX_UNSWITCH_INSNS))
    {
      if (dump_file)
        fprintf (dump_file, ";; Not unswitching, loop too big\n");
      return;
    }

  /* Do not unswitch in cold areas.  */
  if (!maybe_hot_bb_p (loop->header))
    {
      if (dump_file)
        fprintf (dump_file, ";; Not unswitching, not hot area\n");
      return;
    }

  /* Nor if the loop usually does not roll.  */
  if (expected_loop_iterations (loop) < 1)
    {
      if (dump_file)
        fprintf (dump_file, ";; Not unswitching, loop iterations < 1\n");
      return;
    }

  do
    {
      repeat = 0;
      cinsn = NULL_RTX;

      /* Find a bb to unswitch on.  */
      bbs = get_loop_body (loop);
      iv_analysis_loop_init (loop);
      for (i = 0; i < loop->num_nodes; i++)
        if ((cond = may_unswitch_on (bbs[i], loop, &cinsn)))
          break;

      if (i == loop->num_nodes)
        {
          free (bbs);
          return;
        }

      if (cond != const0_rtx
          && cond != const_true_rtx)
        {
          rcond = reversed_condition (cond);
          if (rcond)
            rcond = canon_condition (rcond);

          /* Check whether the result can be predicted.  */
          for (acond = cond_checked; acond; acond = XEXP (acond, 1))
            simplify_using_condition (XEXP (acond, 0), &cond, NULL);
        }

      if (cond == const_true_rtx)
        {
          /* Remove false path.  */
          e = FALLTHRU_EDGE (bbs[i]);
          remove_path (loops, e);
          free (bbs);
          repeat = 1;
        }
      else if (cond == const0_rtx)
        {
          /* Remove true path.  */
          e = BRANCH_EDGE (bbs[i]);
          remove_path (loops, e);
          free (bbs);
          repeat = 1;
        }
    } while (repeat);

  /* We found the condition we can unswitch on.  */
  conds = alloc_EXPR_LIST (0, cond, cond_checked);
  if (rcond)
    rconds = alloc_EXPR_LIST (0, rcond, cond_checked);
  else
    rconds = cond_checked;

  if (dump_file)
    fprintf (dump_file, ";; Unswitching loop\n");

  /* Unswitch the loop on this condition.  */
  nloop = unswitch_loop (loops, loop, bbs[i], cond, cinsn);
  gcc_assert (nloop);

  /* Invoke itself on modified loops.  */
  unswitch_single_loop (loops, nloop, rconds, num + 1);
  unswitch_single_loop (loops, loop, conds, num + 1);

  free_EXPR_LIST_node (conds);
  if (rcond)
    free_EXPR_LIST_node (rconds);

  free (bbs);
}
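
/* An informal note on the recursion above: each of the two loops produced by
   unswitch_loop is reprocessed with the condition (or its reversal) that is
   known to hold in it prepended to COND_CHECKED, so a later invariant test
   that simplify_using_condition can decide from that list is folded to a
   constant and its dead arm removed, instead of triggering yet another
   unswitching; NUM + 1 keeps the total depth bounded by
   PARAM_MAX_UNSWITCH_LEVEL.  */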
/* Unswitch a LOOP w.r.t. the given basic block UNSWITCH_ON.  We only support
   unswitching of innermost loops.  UNSWITCH_ON must be executed in every
   iteration, i.e. it must dominate the LOOP latch.  COND is the condition
   determining which loop is entered.  Returns NULL if impossible, the new
   loop otherwise.  The new loop is entered if COND is true.  If CINSN is not
   NULL, it is the insn in which COND is compared.  */
static struct loop *
unswitch_loop (struct loops *loops, struct loop *loop, basic_block unswitch_on,
               rtx cond, rtx cinsn)
{
  edge entry, latch_edge, true_edge, false_edge, e;
  basic_block switch_bb, unswitch_on_alt;
  struct loop *nloop;
  sbitmap zero_bitmap;
  int irred_flag, prob;
  rtx seq;

  /* Some sanity checking.  */
  gcc_assert (flow_bb_inside_loop_p (loop, unswitch_on));
  gcc_assert (EDGE_COUNT (unswitch_on->succs) == 2);
  gcc_assert (just_once_each_iteration_p (loop, unswitch_on));
  gcc_assert (!loop->inner);
  gcc_assert (flow_bb_inside_loop_p (loop, EDGE_SUCC (unswitch_on, 0)->dest));
  gcc_assert (flow_bb_inside_loop_p (loop, EDGE_SUCC (unswitch_on, 1)->dest));

  entry = loop_preheader_edge (loop);

  /* Make a copy.  */
  irred_flag = entry->flags & EDGE_IRREDUCIBLE_LOOP;
  entry->flags &= ~EDGE_IRREDUCIBLE_LOOP;
  zero_bitmap = sbitmap_alloc (2);
  sbitmap_zero (zero_bitmap);
  if (!duplicate_loop_to_header_edge (loop, entry, loops, 1,
                                      zero_bitmap, NULL, NULL, NULL, 0))
    {
      sbitmap_free (zero_bitmap);
      return NULL;
    }
  sbitmap_free (zero_bitmap);
  entry->flags |= irred_flag;

  /* Record the block with the condition we unswitch on.  */
  unswitch_on_alt = get_bb_copy (unswitch_on);
  true_edge = BRANCH_EDGE (unswitch_on_alt);
  false_edge = FALLTHRU_EDGE (unswitch_on);
  latch_edge = single_succ_edge (get_bb_copy (loop->latch));

  /* Create a block with the condition.  */
  prob = true_edge->probability;
  switch_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
  seq = compare_and_jump_seq (XEXP (cond, 0), XEXP (cond, 1), GET_CODE (cond),
                              block_label (true_edge->dest),
                              prob, cinsn);
  emit_insn_after (seq, BB_END (switch_bb));
  e = make_edge (switch_bb, true_edge->dest, 0);
  e->probability = prob;
  /* Edge counts are derived from the latch count, scaled by the
     REG_BR_PROB_BASE-based probability of each outcome.  */
  e->count = latch_edge->count * prob / REG_BR_PROB_BASE;
  e = make_edge (switch_bb, FALLTHRU_EDGE (unswitch_on)->dest, EDGE_FALLTHRU);
  e->probability = false_edge->probability;
  e->count = latch_edge->count * (false_edge->probability) / REG_BR_PROB_BASE;

  if (irred_flag)
    {
      switch_bb->flags |= BB_IRREDUCIBLE_LOOP;
      EDGE_SUCC (switch_bb, 0)->flags |= EDGE_IRREDUCIBLE_LOOP;
      EDGE_SUCC (switch_bb, 1)->flags |= EDGE_IRREDUCIBLE_LOOP;
    }
  else
    {
      switch_bb->flags &= ~BB_IRREDUCIBLE_LOOP;
      EDGE_SUCC (switch_bb, 0)->flags &= ~EDGE_IRREDUCIBLE_LOOP;
      EDGE_SUCC (switch_bb, 1)->flags &= ~EDGE_IRREDUCIBLE_LOOP;
    }

  /* Loopify from the copy of LOOP body, constructing the new loop.  */
  nloop = loopify (loops, latch_edge,
                   single_pred_edge (get_bb_copy (loop->header)), switch_bb,
                   BRANCH_EDGE (switch_bb), FALLTHRU_EDGE (switch_bb), true);

  /* Remove branches that are now unreachable in the new loops.  */
  remove_path (loops, true_edge);
  remove_path (loops, false_edge);

  /* One of the created loops does not have to be a subloop of the outer loop
     anymore, so fix its placement in the loop data structure.  */
  fix_loop_placement (loop);
  fix_loop_placement (nloop);

  /* Preserve the simple loop preheaders.  */
  loop_split_edge_with (loop_preheader_edge (loop), NULL_RTX);
  loop_split_edge_with (loop_preheader_edge (nloop), NULL_RTX);

  return nloop;
}