1 /* If-conversion support.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
28 #include "insn-config.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
37 #ifndef HAVE_conditional_execution
38 #define HAVE_conditional_execution 0
40 #ifndef HAVE_conditional_move
41 #define HAVE_conditional_move 0
50 #ifndef MAX_CONDITIONAL_EXECUTE
51 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
54 #define NULL_EDGE ((struct edge_def *)NULL)
55 #define NULL_BLOCK ((struct basic_block_def *)NULL)
/* NOTE(review): pass-wide statistics counters; fragment of a lossy
   extraction -- initializers/resets are on source lines not shown here.  */
57 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
58 static int num_possible_if_blocks
;
60 /* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
   execution.  */
62 static int num_updated_if_blocks
;
64 /* # of basic blocks that were removed. */
65 static int num_removed_blocks
;
67 /* The post-dominator relation on the original block numbers. */
68 static sbitmap
*post_dominators
;
70 /* Forward references. */
71 static int count_bb_insns
PARAMS ((basic_block
));
72 static rtx first_active_insn
PARAMS ((basic_block
));
73 static int last_active_insn_p
PARAMS ((basic_block
, rtx
));
74 static int seq_contains_jump
PARAMS ((rtx
));
76 static int cond_exec_process_insns
PARAMS ((rtx
, rtx
, rtx
, rtx
, int));
77 static rtx cond_exec_get_condition
PARAMS ((rtx
));
78 static int cond_exec_process_if_block
PARAMS ((basic_block
, basic_block
,
79 basic_block
, basic_block
));
81 static rtx noce_get_condition
PARAMS ((rtx
, rtx
*));
82 static int noce_process_if_block
PARAMS ((basic_block
, basic_block
,
83 basic_block
, basic_block
));
85 static int process_if_block
PARAMS ((basic_block
, basic_block
,
86 basic_block
, basic_block
));
87 static void merge_if_block
PARAMS ((basic_block
, basic_block
,
88 basic_block
, basic_block
));
90 static int find_if_header
PARAMS ((basic_block
));
91 static int find_if_block
PARAMS ((basic_block
, edge
, edge
));
92 static int find_if_case_1
PARAMS ((basic_block
, edge
, edge
));
93 static int find_if_case_2
PARAMS ((basic_block
, edge
, edge
));
94 static int find_memory
PARAMS ((rtx
*, void *));
95 static int dead_or_predicable
PARAMS ((basic_block
, basic_block
,
96 basic_block
, rtx
, int));
97 static void noce_emit_move_insn
PARAMS ((rtx
, rtx
));
99 /* Abuse the basic_block AUX field to store the original block index,
100 as well as a flag indicating that the block should be rescanned for
    life data update.  */
/* Bit layout (visible from the shifts/masks below): bit 0 is the
   "update life" flag; the original block index lives in the upper bits.  */
103 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
104 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
105 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
106 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
109 /* Count the number of non-jump active insns in BB. */
/* NOTE(review): fragment -- the function header, braces and the counter
   variable of count_bb_insns are on source lines missing from this
   extraction; code tokens below kept byte-identical.  */
120 if (GET_CODE (insn
) == CALL_INSN
|| GET_CODE (insn
) == INSN
)
/* Walk forward through the block's insn chain.  */
125 insn
= NEXT_INSN (insn
);
131 /* Return the first non-jump active insn in the basic block. */
/* NOTE(review): fragment -- return type, braces and loop structure are on
   missing lines.  Visibly: skips a leading CODE_LABEL, then NOTE insns,
   and tests for a JUMP_INSN.  */
134 first_active_insn (bb
)
139 if (GET_CODE (insn
) == CODE_LABEL
)
143 insn
= NEXT_INSN (insn
);
146 while (GET_CODE (insn
) == NOTE
)
150 insn
= NEXT_INSN (insn
);
153 if (GET_CODE (insn
) == JUMP_INSN
)
159 /* Return true if INSN is the last active non-jump insn in BB. */
/* NOTE(review): fragment -- loop body lines missing.  Visibly: advances
   past NOTEs, then reports whether the next active insn is the jump.  */
162 last_active_insn_p (bb
, insn
)
170 insn
= NEXT_INSN (insn
);
172 while (GET_CODE (insn
) == NOTE
);
174 return GET_CODE (insn
) == JUMP_INSN
;
177 /* It is possible, especially when having dealt with multi-word
178 arithmetic, for the expanders to have emitted jumps. Search
179 through the sequence and return TRUE if a jump exists so that
180 we can abort the conversion. */
/* NOTE(review): fragment -- loop/return lines missing from extraction.  */
183 seq_contains_jump (insn
)
188 if (GET_CODE (insn
) == JUMP_INSN
)
190 insn
= NEXT_INSN (insn
);
195 /* Go through a bunch of insns, converting them to conditional
196 execution format if possible. Return TRUE if all of the non-note
197 insns were processed. */
/* NOTE(review): lossily-extracted fragment -- braces and several
   statements are on source lines missing here.  The only code change in
   this edit is repairing the mojibake "(R)_NOTES" back to "&REG_NOTES"
   (the "&REG" prefix was corrupted into a registered-sign character;
   compare the intact "REG_NOTES (insn" reference a few lines below).  */
200 cond_exec_process_insns (start
, end
, test
, prob_val
, mod_ok
)
201 rtx start
; /* first insn to look at */
202 rtx end
; /* last insn to look at */
203 rtx test
; /* conditional execution test */
204 rtx prob_val
; /* probability of branch taken. */
205 int mod_ok
; /* true if modifications ok last insn. */
207 int must_be_last
= FALSE
;
211 for (insn
= start
; ; insn
= NEXT_INSN (insn
))
213 if (GET_CODE (insn
) == NOTE
)
216 if (GET_CODE (insn
) != INSN
&& GET_CODE (insn
) != CALL_INSN
)
219 /* Remove USE insns that get in the way. */
220 if (reload_completed
&& GET_CODE (PATTERN (insn
)) == USE
)
222 /* ??? Ug. Actually unlinking the thing is problematic,
223 given what we'd have to coordinate with our callers. */
224 PUT_CODE (insn
, NOTE
);
225 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
226 NOTE_SOURCE_FILE (insn
) = 0;
230 /* Last insn wasn't last? */
234 if (modified_in_p (test
, insn
))
241 /* Now build the conditional form of the instruction. */
242 pattern
= PATTERN (insn
);
244 /* If the machine needs to modify the insn being conditionally executed,
245 say for example to force a constant integer operand into a temp
246 register, do so here. */
247 #ifdef IFCVT_MODIFY_INSN
248 IFCVT_MODIFY_INSN (pattern
, insn
);
253 validate_change (insn
, &PATTERN (insn
),
254 gen_rtx_COND_EXEC (VOIDmode
, copy_rtx (test
),
257 if (GET_CODE (insn
) == CALL_INSN
&& prob_val
)
258 validate_change (insn
, &REG_NOTES (insn
),
259 alloc_EXPR_LIST (REG_BR_PROB
, prob_val
,
260 REG_NOTES (insn
)), 1);
270 /* Return the condition for a jump. Do not do any special processing. */
/* NOTE(review): fragment -- return statements and braces are on missing
   lines.  Visibly: extracts the comparison from the jump's pc_set and
   reverses it when the branch is taken on the false arm.  */
273 cond_exec_get_condition (jump
)
278 if (any_condjump_p (jump
))
279 test_if
= SET_SRC (pc_set (jump
));
282 cond
= XEXP (test_if
, 0);
284 /* If this branches to JUMP_LABEL when the condition is false,
285 reverse the condition. */
286 if (GET_CODE (XEXP (test_if
, 2)) == LABEL_REF
287 && XEXP (XEXP (test_if
, 2), 0) == JUMP_LABEL (jump
))
288 cond
= gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond
)),
289 GET_MODE (cond
), XEXP (cond
, 0),
295 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
296 to conditional execution. Return TRUE if we were successful at
297 converting the block. */
/* NOTE(review): lossily-extracted fragment -- braces, early returns and
   several conditions are on source lines missing from this extraction;
   code tokens kept byte-identical.  */
300 cond_exec_process_if_block (test_bb
, then_bb
, else_bb
, join_bb
)
301 basic_block test_bb
; /* Basic block test is in */
302 basic_block then_bb
; /* Basic block for THEN block */
303 basic_block else_bb
; /* Basic block for ELSE block */
304 basic_block join_bb
; /* Basic block the join label is in */
306 rtx test_expr
; /* expression in IF_THEN_ELSE that is tested */
307 rtx then_start
; /* first insn in THEN block */
308 rtx then_end
; /* last insn + 1 in THEN block */
309 rtx else_start
= NULL_RTX
; /* first insn in ELSE block or NULL */
310 rtx else_end
= NULL_RTX
; /* last insn + 1 in ELSE block */
311 int max
; /* max # of insns to convert. */
312 int then_mod_ok
; /* whether conditional mods are ok in THEN */
313 rtx true_expr
; /* test for else block insns */
314 rtx false_expr
; /* test for then block insns */
315 rtx true_prob_val
; /* probability of else block */
316 rtx false_prob_val
; /* probability of then block */
319 /* Find the conditional jump to the ELSE or JOIN part, and isolate
    the test.  */
321 test_expr
= cond_exec_get_condition (test_bb
->end
);
325 /* If the conditional jump is more than just a conditional jump,
326 then we can not do conditional execution conversion on this block. */
327 if (!onlyjump_p (test_bb
->end
))
330 /* Collect the bounds of where we're to search. */
332 then_start
= then_bb
->head
;
333 then_end
= then_bb
->end
;
335 /* Skip a label heading THEN block. */
336 if (GET_CODE (then_start
) == CODE_LABEL
)
337 then_start
= NEXT_INSN (then_start
);
339 /* Skip a (use (const_int 0)) or branch as the final insn. */
340 if (GET_CODE (then_end
) == INSN
341 && GET_CODE (PATTERN (then_end
)) == USE
342 && GET_CODE (XEXP (PATTERN (then_end
), 0)) == CONST_INT
)
343 then_end
= PREV_INSN (then_end
);
344 else if (GET_CODE (then_end
) == JUMP_INSN
)
345 then_end
= PREV_INSN (then_end
);
349 /* Skip the ELSE block's label. */
350 else_start
= NEXT_INSN (else_bb
->head
);
351 else_end
= else_bb
->end
;
353 /* Skip a (use (const_int 0)) or branch as the final insn. */
354 if (GET_CODE (else_end
) == INSN
355 && GET_CODE (PATTERN (else_end
)) == USE
356 && GET_CODE (XEXP (PATTERN (else_end
), 0)) == CONST_INT
)
357 else_end
= PREV_INSN (else_end
);
358 else if (GET_CODE (else_end
) == JUMP_INSN
)
359 else_end
= PREV_INSN (else_end
);
362 /* How many instructions should we convert in total? */
/* Twice the budget when both THEN and ELSE arms are converted.  */
366 max
= 2 * MAX_CONDITIONAL_EXECUTE
;
367 n_insns
= count_bb_insns (else_bb
);
370 max
= MAX_CONDITIONAL_EXECUTE
;
371 n_insns
+= count_bb_insns (then_bb
);
375 /* Map test_expr/test_jump into the appropriate MD tests to use on
376 the conditionally executed code. */
378 true_expr
= test_expr
;
379 false_expr
= gen_rtx_fmt_ee (reverse_condition (GET_CODE (true_expr
)),
380 GET_MODE (true_expr
), XEXP (true_expr
, 0),
381 XEXP (true_expr
, 1));
383 #ifdef IFCVT_MODIFY_TESTS
384 /* If the machine description needs to modify the tests, such as setting a
385 conditional execution register from a comparison, it can do so here. */
386 IFCVT_MODIFY_TESTS (true_expr
, false_expr
, test_bb
, then_bb
, else_bb
,
389 /* See if the conversion failed */
390 if (!true_expr
|| !false_expr
)
394 true_prob_val
= find_reg_note (test_bb
->end
, REG_BR_PROB
, NULL_RTX
);
397 true_prob_val
= XEXP (true_prob_val
, 0);
398 false_prob_val
= GEN_INT (REG_BR_PROB_BASE
- INTVAL (true_prob_val
));
401 false_prob_val
= NULL_RTX
;
403 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
404 on the THEN block. */
405 then_mod_ok
= (else_bb
== NULL_BLOCK
);
407 /* Go through the THEN and ELSE blocks converting the insns if possible
408 to conditional execution. */
411 && ! cond_exec_process_insns (then_start
, then_end
,
412 false_expr
, false_prob_val
, then_mod_ok
))
416 && ! cond_exec_process_insns (else_start
, else_end
,
417 true_expr
, true_prob_val
, TRUE
))
420 if (! apply_change_group ())
423 #ifdef IFCVT_MODIFY_FINAL
424 /* Do any machine dependent final modifications */
425 IFCVT_MODIFY_FINAL (test_bb
, then_bb
, else_bb
, join_bb
);
428 /* Conversion succeeded. */
430 fprintf (rtl_dump_file
, "%d insn%s converted to conditional execution.\n",
431 n_insns
, (n_insns
== 1) ? " was" : "s were");
433 /* Merge the blocks! */
434 merge_if_block (test_bb
, then_bb
, else_bb
, join_bb
);
438 #ifdef IFCVT_MODIFY_CANCEL
439 /* Cancel any machine dependent changes. */
440 IFCVT_MODIFY_CANCEL (test_bb
, then_bb
, else_bb
, join_bb
);
447 /* Used by noce_process_if_block to communicate with its subroutines.
449 The subroutines know that A and B may be evaluated freely. They
450 know that X is a register. They should insert new instructions
451 before cond_earliest. */
457 rtx jump
, cond
, cond_earliest
;
460 static rtx noce_emit_store_flag
PARAMS ((struct noce_if_info
*,
462 static int noce_try_store_flag
PARAMS ((struct noce_if_info
*));
463 static int noce_try_store_flag_inc
PARAMS ((struct noce_if_info
*));
464 static int noce_try_store_flag_constants
PARAMS ((struct noce_if_info
*));
465 static int noce_try_store_flag_mask
PARAMS ((struct noce_if_info
*));
466 static rtx noce_emit_cmove
PARAMS ((struct noce_if_info
*,
467 rtx
, enum rtx_code
, rtx
,
469 static int noce_try_cmove
PARAMS ((struct noce_if_info
*));
470 static int noce_try_cmove_arith
PARAMS ((struct noce_if_info
*));
472 /* Helper function for noce_try_store_flag*. */
/* NOTE(review): fragment -- braces, the cond_complex early-exit path and
   the start_sequence/end_sequence scaffolding are on missing lines.  */
475 noce_emit_store_flag (if_info
, x
, reversep
, normalize
)
476 struct noce_if_info
*if_info
;
478 int reversep
, normalize
;
480 rtx cond
= if_info
->cond
;
/* "Complex" means an operand the generic emit_store_flag path can't
   accept as a general_operand.  */
484 cond_complex
= (! general_operand (XEXP (cond
, 0), VOIDmode
)
485 || ! general_operand (XEXP (cond
, 1), VOIDmode
));
487 /* If earliest == jump, or when the condition is complex, try to
488 build the store_flag insn directly. */
491 cond
= XEXP (SET_SRC (pc_set (if_info
->jump
)), 0);
493 if ((if_info
->cond_earliest
== if_info
->jump
|| cond_complex
)
494 && (normalize
== 0 || STORE_FLAG_VALUE
== normalize
))
498 code
= GET_CODE (cond
);
500 code
= reverse_condition (code
);
502 tmp
= gen_rtx_fmt_ee (code
, GET_MODE (x
), XEXP (cond
, 0),
504 tmp
= gen_rtx_SET (VOIDmode
, x
, tmp
);
507 tmp
= emit_insn (tmp
);
/* Keep the hand-built insn only if the target recognizes it.  */
509 if (recog_memoized (tmp
) >= 0)
515 if_info
->cond_earliest
= if_info
->jump
;
523 /* Don't even try if the comparison operands are weird. */
527 code
= GET_CODE (cond
);
529 code
= reverse_condition (code
);
531 return emit_store_flag (x
, code
, XEXP (cond
, 0),
532 XEXP (cond
, 1), VOIDmode
,
533 (code
== LTU
|| code
== LEU
534 || code
== GEU
|| code
== GTU
), normalize
);
537 /* Emit instruction to move a rtx into STRICT_LOW_PART. */
/* NOTE(review): fragment -- braces, the assignment of `outer' and the
   early return are on missing lines.  Plain moves go through
   emit_move_insn; STRICT_LOW_PART destinations go through
   store_bit_field on the inner (SUBREG) register.  */
539 noce_emit_move_insn (x
, y
)
542 enum machine_mode outmode
, inmode
;
546 if (GET_CODE (x
) != STRICT_LOW_PART
)
548 emit_move_insn (x
, y
);
553 inner
= XEXP (outer
, 0);
554 outmode
= GET_MODE (outer
);
555 inmode
= GET_MODE (inner
);
556 bitpos
= SUBREG_WORD (outer
) * BITS_PER_WORD
;
/* On big-endian targets the low part sits at the high bit offset.  */
557 if (BYTES_BIG_ENDIAN
)
558 bitpos
+= (GET_MODE_BITSIZE (inmode
) - GET_MODE_BITSIZE (outmode
))
560 store_bit_field (inner
, GET_MODE_BITSIZE (outmode
),
561 bitpos
, outmode
, y
, GET_MODE_BITSIZE (inmode
),
562 GET_MODE_BITSIZE (inmode
));
565 /* Convert "if (test) x = 1; else x = 0".
567 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
568 tried in noce_try_store_flag_constants after noce_try_cmove has had
569 a go at the conversion. */
/* NOTE(review): fragment -- braces, reversep initialization, sequence
   start/end and the success/failure returns are on missing lines.  */
572 noce_try_store_flag (if_info
)
573 struct noce_if_info
*if_info
;
/* Case 1: b == STORE_FLAG_VALUE, a == 0 -- store flag directly.  */
578 if (GET_CODE (if_info
->b
) == CONST_INT
579 && INTVAL (if_info
->b
) == STORE_FLAG_VALUE
580 && if_info
->a
== const0_rtx
)
/* Case 2: roles swapped -- needs a reversible comparison.  */
582 else if (if_info
->b
== const0_rtx
583 && GET_CODE (if_info
->a
) == CONST_INT
584 && INTVAL (if_info
->a
) == STORE_FLAG_VALUE
585 && can_reverse_comparison_p (if_info
->cond
, if_info
->jump
))
592 target
= noce_emit_store_flag (if_info
, if_info
->x
, reversep
, 0);
595 if (target
!= if_info
->x
)
596 noce_emit_move_insn (if_info
->x
, target
);
600 emit_insns_before (seq
, if_info
->cond_earliest
);
611 /* Convert "if (test) x = a; else x = b", for A and B constant. */
/* NOTE(review): fragment -- braces, some else-if heads and the sequence
   bookkeeping are on missing lines; code tokens kept byte-identical.  */
614 noce_try_store_flag_constants (if_info
)
615 struct noce_if_info
*if_info
;
619 HOST_WIDE_INT itrue
, ifalse
, diff
, tmp
;
620 int normalize
, can_reverse
;
623 && GET_CODE (if_info
->a
) == CONST_INT
624 && GET_CODE (if_info
->b
) == CONST_INT
)
626 ifalse
= INTVAL (if_info
->a
);
627 itrue
= INTVAL (if_info
->b
);
628 diff
= itrue
- ifalse
;
630 can_reverse
= can_reverse_comparison_p (if_info
->cond
, if_info
->jump
);
/* Pick a store-flag normalization strategy from the constant shapes.  */
633 if (diff
== STORE_FLAG_VALUE
|| diff
== -STORE_FLAG_VALUE
)
635 else if (ifalse
== 0 && exact_log2 (itrue
) >= 0
636 && (STORE_FLAG_VALUE
== 1
637 || BRANCH_COST
>= 2))
639 else if (itrue
== 0 && exact_log2 (ifalse
) >= 0 && can_reverse
640 && (STORE_FLAG_VALUE
== 1 || BRANCH_COST
>= 2))
641 normalize
= 1, reversep
= 1;
643 && (STORE_FLAG_VALUE
== -1
644 || BRANCH_COST
>= 2))
646 else if (ifalse
== -1 && can_reverse
647 && (STORE_FLAG_VALUE
== -1 || BRANCH_COST
>= 2))
648 normalize
= -1, reversep
= 1;
649 else if ((BRANCH_COST
>= 2 && STORE_FLAG_VALUE
== -1)
/* When reversing, swap the two constants as well.  */
657 tmp
= itrue
; itrue
= ifalse
; ifalse
= tmp
;
662 target
= noce_emit_store_flag (if_info
, if_info
->x
, reversep
, normalize
);
669 /* if (test) x = 3; else x = 4;
670 => x = 3 + (test == 0); */
671 if (diff
== STORE_FLAG_VALUE
|| diff
== -STORE_FLAG_VALUE
)
673 target
= expand_binop (GET_MODE (if_info
->x
),
674 (diff
== STORE_FLAG_VALUE
675 ? add_optab
: sub_optab
),
676 GEN_INT (ifalse
), target
, if_info
->x
, 0,
680 /* if (test) x = 8; else x = 0;
681 => x = (test != 0) << 3; */
682 else if (ifalse
== 0 && (tmp
= exact_log2 (itrue
)) >= 0)
684 target
= expand_binop (GET_MODE (if_info
->x
), ashl_optab
,
685 target
, GEN_INT (tmp
), if_info
->x
, 0,
689 /* if (test) x = -1; else x = b;
690 => x = -(test != 0) | b; */
691 else if (itrue
== -1)
693 target
= expand_binop (GET_MODE (if_info
->x
), ior_optab
,
694 target
, GEN_INT (ifalse
), if_info
->x
, 0,
698 /* if (test) x = a; else x = b;
699 => x = (-(test != 0) & (b - a)) + a; */
702 target
= expand_binop (GET_MODE (if_info
->x
), and_optab
,
703 target
, GEN_INT (diff
), if_info
->x
, 0,
706 target
= expand_binop (GET_MODE (if_info
->x
), add_optab
,
707 target
, GEN_INT (ifalse
), if_info
->x
, 0,
717 if (target
!= if_info
->x
)
718 noce_emit_move_insn (if_info
->x
, target
);
/* Expanders may emit jumps; bail out if so (see seq_contains_jump).  */
723 if (seq_contains_jump (seq
))
726 emit_insns_before (seq
, if_info
->cond_earliest
);
734 /* Convert "if (test) foo++" into "foo += (test != 0)", and
735 similarly for "foo--". */
/* NOTE(review): fragment -- braces, the leading BRANCH_COST guard and
   sequence bookkeeping are on missing lines.  */
738 noce_try_store_flag_inc (if_info
)
739 struct noce_if_info
*if_info
;
742 int subtract
, normalize
;
748 /* Should be no `else' case to worry about. */
749 && if_info
->b
== if_info
->x
750 && GET_CODE (if_info
->a
) == PLUS
751 && (XEXP (if_info
->a
, 1) == const1_rtx
752 || XEXP (if_info
->a
, 1) == constm1_rtx
)
753 && rtx_equal_p (XEXP (if_info
->a
, 0), if_info
->x
)
754 && can_reverse_comparison_p (if_info
->cond
, if_info
->jump
))
/* Decide add vs. subtract and how to normalize the store-flag value.  */
756 if (STORE_FLAG_VALUE
== INTVAL (XEXP (if_info
->a
, 1)))
757 subtract
= 0, normalize
= 0;
758 else if (-STORE_FLAG_VALUE
== INTVAL (XEXP (if_info
->a
, 1)))
759 subtract
= 1, normalize
= 0;
761 subtract
= 0, normalize
= INTVAL (XEXP (if_info
->a
, 1));
765 target
= noce_emit_store_flag (if_info
,
766 gen_reg_rtx (GET_MODE (if_info
->x
)),
770 target
= expand_binop (GET_MODE (if_info
->x
),
771 subtract
? sub_optab
: add_optab
,
772 if_info
->x
, target
, if_info
->x
, 0, OPTAB_WIDEN
);
775 if (target
!= if_info
->x
)
776 noce_emit_move_insn (if_info
->x
, target
);
781 if (seq_contains_jump (seq
))
784 emit_insns_before (seq
, if_info
->cond_earliest
);
795 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
/* NOTE(review): fragment -- braces, the leading BRANCH_COST /
   STORE_FLAG_VALUE guard head and sequence bookkeeping are on missing
   lines.  */
798 noce_try_store_flag_mask (if_info
)
799 struct noce_if_info
*if_info
;
807 || STORE_FLAG_VALUE
== -1)
808 && ((if_info
->a
== const0_rtx
809 && rtx_equal_p (if_info
->b
, if_info
->x
))
810 || ((reversep
= can_reverse_comparison_p (if_info
->cond
,
812 && if_info
->b
== const0_rtx
813 && rtx_equal_p (if_info
->a
, if_info
->x
))))
816 target
= noce_emit_store_flag (if_info
,
817 gen_reg_rtx (GET_MODE (if_info
->x
)),
820 target
= expand_binop (GET_MODE (if_info
->x
), and_optab
,
821 if_info
->x
, target
, if_info
->x
, 0,
826 if (target
!= if_info
->x
)
827 noce_emit_move_insn (if_info
->x
, target
);
832 if (seq_contains_jump (seq
))
835 emit_insns_before (seq
, if_info
->cond_earliest
);
846 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
/* NOTE(review): fragment -- braces, sequence scaffolding and the
   #else/#endif halves of the HAVE_conditional_move conditional are on
   missing lines.  */
849 noce_emit_cmove (if_info
, x
, code
, cmp_a
, cmp_b
, vfalse
, vtrue
)
850 struct noce_if_info
*if_info
;
851 rtx x
, cmp_a
, cmp_b
, vfalse
, vtrue
;
854 /* If earliest == jump, try to build the cmove insn directly.
855 This is helpful when combine has created some complex condition
856 (like for alpha's cmovlbs) that we can't hope to regenerate
857 through the normal interface. */
859 if (if_info
->cond_earliest
== if_info
->jump
)
/* Hand-build (set x (if_then_else cond vtrue vfalse)).  */
863 tmp
= gen_rtx_fmt_ee (code
, GET_MODE (if_info
->cond
), cmp_a
, cmp_b
);
864 tmp
= gen_rtx_IF_THEN_ELSE (GET_MODE (x
), tmp
, vtrue
, vfalse
);
865 tmp
= gen_rtx_SET (VOIDmode
, x
, tmp
);
868 tmp
= emit_insn (tmp
);
/* Keep the insn only if the target recognizes it.  */
870 if (recog_memoized (tmp
) >= 0)
882 /* Don't even try if the comparison operands are weird. */
883 if (! general_operand (cmp_a
, GET_MODE (cmp_a
))
884 || ! general_operand (cmp_b
, GET_MODE (cmp_b
)))
887 #if HAVE_conditional_move
888 return emit_conditional_move (x
, code
, cmp_a
, cmp_b
, VOIDmode
,
889 vtrue
, vfalse
, GET_MODE (x
),
890 (code
== LTU
|| code
== GEU
891 || code
== LEU
|| code
== GTU
));
893 /* We'll never get here, as noce_process_if_block doesn't call the
894 functions involved. Ifdef code, however, should be discouraged
895 because it leads to typos in the code not selected. However,
896 emit_conditional_move won't exist either. */
901 /* Try only simple constants and registers here. More complex cases
902 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
903 has had a go at it. */
/* NOTE(review): no noce_try_store_flag_arith is declared in this file's
   forward declarations -- presumably the comment means
   noce_try_store_flag_constants; verify against the full source.
   Fragment: braces, sequence scaffolding and returns are on missing
   lines.  */
906 noce_try_cmove (if_info
)
907 struct noce_if_info
*if_info
;
912 if ((CONSTANT_P (if_info
->a
) || register_operand (if_info
->a
, VOIDmode
))
913 && (CONSTANT_P (if_info
->b
) || register_operand (if_info
->b
, VOIDmode
)))
917 code
= GET_CODE (if_info
->cond
);
918 target
= noce_emit_cmove (if_info
, if_info
->x
, code
,
919 XEXP (if_info
->cond
, 0),
920 XEXP (if_info
->cond
, 1),
921 if_info
->a
, if_info
->b
);
925 if (target
!= if_info
->x
)
926 noce_emit_move_insn (if_info
->x
, target
);
930 emit_insns_before (seq
, if_info
->cond_earliest
);
943 /* Try more complex cases involving conditional_move. */
/* NOTE(review): fragment -- local declarations, braces, the is_mem flag
   and the end_seq_and_fail label itself are on missing lines; code
   tokens kept byte-identical.  */
946 noce_try_cmove_arith (if_info
)
947 struct noce_if_info
*if_info
;
957 /* A conditional move from two memory sources is equivalent to a
958 conditional on their addresses followed by a load. Don't do this
959 early because it'll screw alias analysis. Note that we've
960 already checked for no side effects. */
961 if (! no_new_pseudos
&& cse_not_expected
962 && GET_CODE (a
) == MEM
&& GET_CODE (b
) == MEM
967 x
= gen_reg_rtx (Pmode
);
971 /* ??? We could handle this if we knew that a load from A or B could
972 not fault. This is also true if we've already loaded
973 from the address along the path from ENTRY. */
974 else if (may_trap_p (a
) || may_trap_p (b
))
977 /* if (test) x = a + b; else x = c - d;  ... */
984 code
= GET_CODE (if_info
->cond
);
985 insn_a
= if_info
->insn_a
;
986 insn_b
= if_info
->insn_b
;
988 /* Possibly rearrange operands to make things come out more natural. */
989 if (can_reverse_comparison_p (if_info
->cond
, if_info
->jump
))
992 if (rtx_equal_p (b
, x
))
994 else if (general_operand (b
, GET_MODE (b
)))
999 code
= reverse_condition (code
);
1000 tmp
= a
, a
= b
, b
= tmp
;
1001 tmp
= insn_a
, insn_a
= insn_b
, insn_b
= tmp
;
1007 /* If either operand is complex, load it into a register first.
1008 The best way to do this is to copy the original insn. In this
1009 way we preserve any clobbers etc that the insn may have had.
1010 This is of course not possible in the IS_MEM case. */
1011 if (! general_operand (a
, GET_MODE (a
)))
1016 goto end_seq_and_fail
;
1020 tmp
= gen_reg_rtx (GET_MODE (a
));
1021 tmp
= emit_insn (gen_rtx_SET (VOIDmode
, tmp
, a
));
1024 goto end_seq_and_fail
;
1027 a
= gen_reg_rtx (GET_MODE (a
));
1028 tmp
= copy_rtx (insn_a
);
1029 set
= single_set (tmp
);
1031 tmp
= emit_insn (PATTERN (tmp
));
1033 if (recog_memoized (tmp
) < 0)
1034 goto end_seq_and_fail
;
/* Same treatment for the B operand.  */
1036 if (! general_operand (b
, GET_MODE (b
)))
1041 goto end_seq_and_fail
;
1045 tmp
= gen_reg_rtx (GET_MODE (b
));
1046 tmp
= emit_insn (gen_rtx_SET (VOIDmode
, tmp
, b
));
1049 goto end_seq_and_fail
;
1052 b
= gen_reg_rtx (GET_MODE (b
));
1053 tmp
= copy_rtx (insn_b
);
1054 set
= single_set (tmp
);
1056 tmp
= emit_insn (PATTERN (tmp
));
1058 if (recog_memoized (tmp
) < 0)
1059 goto end_seq_and_fail
;
1062 target
= noce_emit_cmove (if_info
, x
, code
, XEXP (if_info
->cond
, 0),
1063 XEXP (if_info
->cond
, 1), a
, b
);
1066 goto end_seq_and_fail
;
1068 /* If we're handling a memory for above, emit the load now. */
1071 tmp
= gen_rtx_MEM (GET_MODE (if_info
->x
), target
);
1073 /* Copy over flags as appropriate. */
1074 if (MEM_VOLATILE_P (if_info
->a
) || MEM_VOLATILE_P (if_info
->b
))
1075 MEM_VOLATILE_P (tmp
) = 1;
1076 if (MEM_IN_STRUCT_P (if_info
->a
) && MEM_IN_STRUCT_P (if_info
->b
))
1077 MEM_IN_STRUCT_P (tmp
) = 1;
1078 if (MEM_SCALAR_P (if_info
->a
) && MEM_SCALAR_P (if_info
->b
))
1079 MEM_SCALAR_P (tmp
) = 1;
1080 if (MEM_ALIAS_SET (if_info
->a
) == MEM_ALIAS_SET (if_info
->b
))
1081 MEM_ALIAS_SET (tmp
) = MEM_ALIAS_SET (if_info
->a
);
1083 noce_emit_move_insn (if_info
->x
, tmp
);
1085 else if (target
!= x
)
1086 noce_emit_move_insn (x
, target
);
1090 emit_insns_before (tmp
, if_info
->cond_earliest
);
1098 /* Look for the condition for the jump first. We'd prefer to avoid
1099 get_condition if we can -- it tries to look back for the contents
1100 of an original compare. On targets that use normal integers for
1101 comparisons, e.g. alpha, this is wasteful. */
/* NOTE(review): fragment -- braces, *earliest assignment on the fast
   path, and the return statements are on missing lines.  */
1104 noce_get_condition (jump
, earliest
)
1111 /* If the condition variable is a register and is MODE_INT, accept it.
1112 Otherwise, fall back on get_condition. */
1114 if (! any_condjump_p (jump
))
1117 set
= pc_set (jump
);
1119 cond
= XEXP (SET_SRC (set
), 0);
1120 if (GET_CODE (XEXP (cond
, 0)) == REG
1121 && GET_MODE_CLASS (GET_MODE (XEXP (cond
, 0))) == MODE_INT
)
1125 /* If this branches to JUMP_LABEL when the condition is false,
1126 reverse the condition. */
1127 if (GET_CODE (XEXP (SET_SRC (set
), 2)) == LABEL_REF
1128 && XEXP (XEXP (SET_SRC (set
), 2), 0) == JUMP_LABEL (jump
))
1129 cond
= gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond
)),
1130 GET_MODE (cond
), XEXP (cond
, 0),
/* Slow path: let get_condition trace back to the original compare.  */
1134 cond
= get_condition (jump
, earliest
);
1139 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1140 without using conditional execution. Return TRUE if we were
1141 successful at converting the block. */
/* NOTE(review): lossily-extracted fragment -- braces, several local
   declarations, goto labels and the success/failure returns are on
   missing source lines; code tokens kept byte-identical.  */
1144 noce_process_if_block (test_bb
, then_bb
, else_bb
, join_bb
)
1145 basic_block test_bb
; /* Basic block test is in */
1146 basic_block then_bb
; /* Basic block for THEN block */
1147 basic_block else_bb
; /* Basic block for ELSE block */
1148 basic_block join_bb
; /* Basic block the join label is in */
1150 /* We're looking for patterns of the form
1152 (1) if (...) x = a; else x = b;
1153 (2) x = b; if (...) x = a;
1154 (3) if (...) x = a; // as if with an initial x = x.
1156 The later patterns require jumps to be more expensive.
1158 ??? For future expansion, look for multiple X in such patterns. */
1160 struct noce_if_info if_info
;
1163 rtx orig_x
, x
, a
, b
;
1164 rtx jump
, cond
, insn
;
1166 /* If this is not a standard conditional jump, we can't parse it. */
1167 jump
= test_bb
->end
;
1168 cond
= noce_get_condition (jump
, &if_info
.cond_earliest
);
1172 /* If the conditional jump is more than just a conditional jump,
1173 then we can not do if-conversion on this block. */
1174 if (! onlyjump_p (jump
))
1177 /* We must be comparing objects whose modes imply the size. */
1178 if (GET_MODE (XEXP (cond
, 0)) == BLKmode
)
1181 /* Look for one of the potential sets. */
1182 insn_a
= first_active_insn (then_bb
);
1184 || ! last_active_insn_p (then_bb
, insn_a
)
1185 || (set_a
= single_set (insn_a
)) == NULL_RTX
)
1188 x
= SET_DEST (set_a
);
1189 a
= SET_SRC (set_a
);
1191 /* Look for the other potential set. Make sure we've got equivalent
    destinations.  */
1193 /* ??? This is overconservative. Storing to two different mems is
1194 as easy as conditionally computing the address. Storing to a
1195 single mem merely requires a scratch memory to use as one of the
1196 destination addresses; often the memory immediately below the
1197 stack pointer is available for this. */
1201 insn_b
= first_active_insn (else_bb
);
1203 || ! last_active_insn_p (else_bb
, insn_b
)
1204 || (set_b
= single_set (insn_b
)) == NULL_RTX
1205 || ! rtx_equal_p (x
, SET_DEST (set_b
)))
/* No ELSE arm: look for pattern (2) -- "x = b" just before the test.  */
1210 insn_b
= prev_nonnote_insn (if_info
.cond_earliest
);
1212 || GET_CODE (insn_b
) != INSN
1213 || (set_b
= single_set (insn_b
)) == NULL_RTX
1214 || ! rtx_equal_p (x
, SET_DEST (set_b
))
1215 || reg_mentioned_p (x
, cond
)
1216 || reg_mentioned_p (x
, a
)
1217 || reg_mentioned_p (x
, SET_SRC (set_b
)))
1218 insn_b
= set_b
= NULL_RTX
;
1220 b
= (set_b
? SET_SRC (set_b
) : x
);
1222 /* X may not be mentioned in the range (cond_earliest, jump]. */
1223 for (insn
= jump
; insn
!= if_info
.cond_earliest
; insn
= PREV_INSN (insn
))
1224 if (INSN_P (insn
) && reg_mentioned_p (x
, insn
))
1227 /* A and B may not be modified in the range [cond_earliest, jump). */
1228 for (insn
= if_info
.cond_earliest
; insn
!= jump
; insn
= NEXT_INSN (insn
))
1230 && (modified_in_p (a
, insn
) || modified_in_p (b
, insn
)))
1233 /* Only operate on register destinations, and even then avoid extending
1234 the lifetime of hard registers on small register class machines. */
1236 if (GET_CODE (x
) != REG
1237 || (SMALL_REGISTER_CLASSES
1238 && REGNO (x
) < FIRST_PSEUDO_REGISTER
))
/* Work in a fresh pseudo; orig_x is fixed up after conversion.  */
1242 x
= gen_reg_rtx (GET_MODE (GET_CODE (x
) == STRICT_LOW_PART
1243 ? XEXP (x
, 0) : x
));
1246 /* Don't operate on sources that may trap or are volatile. */
1247 if (side_effects_p (a
) || side_effects_p (b
)
1248 || (GET_CODE (a
) != MEM
&& may_trap_p (a
))
1249 || (GET_CODE (b
) != MEM
&& may_trap_p (b
)))
1252 /* Set up the info block for our subroutines. */
1253 if_info
.cond
= cond
;
1254 if_info
.jump
= jump
;
1255 if_info
.insn_a
= insn_a
;
1256 if_info
.insn_b
= insn_b
;
1261 /* Try optimizations in some approximation of a useful order. */
1262 /* ??? Should first look to see if X is live incoming at all. If it
1263 isn't, we don't need anything but an unconditional set. */
1265 /* Look and see if A and B are really the same. Avoid creating silly
1266 cmove constructs that no one will fix up later. */
1267 if (rtx_equal_p (a
, b
))
1269 /* If we have an INSN_B, we don't have to create any new rtl. Just
1270 move the instruction that we already have. If we don't have an
1271 INSN_B, that means that A == X, and we've got a noop move. In
1272 that case don't do anything and let the code below delete INSN_A. */
1273 if (insn_b
&& else_bb
)
1277 if (else_bb
&& insn_b
== else_bb
->end
)
1278 else_bb
->end
= PREV_INSN (insn_b
);
1279 reorder_insns (insn_b
, insn_b
, PREV_INSN (if_info
.cond_earliest
));
1281 /* If there was a REG_EQUAL note, delete it since it may have been
1282 true due to this insn being after a jump. */
1283 if ((note
= find_reg_note (insn_b
, REG_EQUAL
, NULL_RTX
)) != 0)
1284 remove_note (insn_b
, note
);
1288 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1289 x must be executed twice. */
1290 else if (insn_b
&& side_effects_p (orig_x
))
/* Transformations, cheapest first.  */
1297 if (noce_try_store_flag (&if_info
))
1299 if (HAVE_conditional_move
1300 && noce_try_cmove (&if_info
))
1302 if (! HAVE_conditional_execution
)
1304 if (noce_try_store_flag_constants (&if_info
))
1306 if (noce_try_store_flag_inc (&if_info
))
1308 if (noce_try_store_flag_mask (&if_info
))
1310 if (HAVE_conditional_move
1311 && noce_try_cmove_arith (&if_info
))
1318 /* The original sets may now be killed. */
1319 if (insn_a
== then_bb
->end
)
1320 then_bb
->end
= PREV_INSN (insn_a
);
1321 flow_delete_insn (insn_a
);
1323 /* Several special cases here: First, we may have reused insn_b above,
1324 in which case insn_b is now NULL. Second, we want to delete insn_b
1325 if it came from the ELSE block, because it follows the now correct
1326 write that appears in the TEST block. However, if we got insn_b from
1327 the TEST block, it may in fact be loading data needed for the comparison.
1328 We'll let life_analysis remove the insn if it's really dead. */
1329 if (insn_b
&& else_bb
)
1331 if (insn_b
== else_bb
->end
)
1332 else_bb
->end
= PREV_INSN (insn_b
);
1333 flow_delete_insn (insn_b
);
1336 /* The new insns will have been inserted before cond_earliest. We should
1337 be able to remove the jump with impunity, but the condition itself may
1338 have been modified by gcse to be shared across basic blocks. */
1339 test_bb
->end
= PREV_INSN (jump
);
1340 flow_delete_insn (jump
);
1342 /* If we used a temporary, fix it up now. */
1346 noce_emit_move_insn (orig_x
, x
);
1347 insn_b
= gen_sequence ();
1350 test_bb
->end
= emit_insn_after (insn_b
, test_bb
->end
);
1353 /* Merge the blocks! */
1354 merge_if_block (test_bb
, then_bb
, else_bb
, join_bb
);
1359 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1360 straight line code. Return true if successful. */
/* Dispatch one recognized IF-THEN or IF-THEN-ELSE region to whichever
   conversion strategy applies: before reload, the store-flag /
   conditional-move path (noce_process_if_block); on targets with
   conditional execution, the predication path
   (cond_exec_process_if_block).
   NOTE(review): this chunk lost lines in extraction (the success
   returns and braces between the embedded original line numbers are
   missing) -- confirm control flow against the pristine file.  */
1363 process_if_block (test_bb
, then_bb
, else_bb
, join_bb
)
1364 basic_block test_bb
; /* Basic block test is in */
1365 basic_block then_bb
; /* Basic block for THEN block */
1366 basic_block else_bb
; /* Basic block for ELSE block */
1367 basic_block join_bb
; /* Basic block the join label is in */
/* Before reload we can still materialize store-flag/cmove sequences.  */
1369 if (! reload_completed
1370 && noce_process_if_block (test_bb
, then_bb
, else_bb
, join_bb
))
/* Otherwise, predicate the blocks if the target supports it.  */
1373 if (HAVE_conditional_execution
1375 && cond_exec_process_if_block (test_bb
, then_bb
, else_bb
, join_bb
))
1381 /* Merge the blocks and mark for local life update. */
/* Collapse a converted IF-THEN(-ELSE)(-JOIN) diamond into a single
   combined block, updating the removed/updated-block statistics and
   marking the result for a life-info update.
   NOTE(review): extraction gaps here (missing braces/assignments, e.g.
   the initial "combo_bb = test_bb") -- verify against pristine source.  */
1384 merge_if_block (test_bb
, then_bb
, else_bb
, join_bb
)
1385 basic_block test_bb
; /* Basic block test is in */
1386 basic_block then_bb
; /* Basic block for THEN block */
1387 basic_block else_bb
; /* Basic block for ELSE block */
1388 basic_block join_bb
; /* Basic block the join label is in */
1390 basic_block combo_bb
;
1392 /* All block merging is done into the lower block numbers. */
1396 /* First merge TEST block into THEN block. This is a no-brainer since
1397 the THEN block did not have a code label to begin with. */
1399 if (combo_bb
->global_live_at_end
)
1400 COPY_REG_SET (combo_bb
->global_live_at_end
, then_bb
->global_live_at_end
);
1401 merge_blocks_nomove (combo_bb
, then_bb
);
1402 num_removed_blocks
++;
1404 /* The ELSE block, if it existed, had a label. That label count
1405 will almost always be zero, but odd things can happen when labels
1406 get their addresses taken. */
1409 merge_blocks_nomove (combo_bb
, else_bb
);
1410 num_removed_blocks
++;
1413 /* If there was no join block reported, that means it was not adjacent
1414 to the others, and so we cannot merge them. */
1418 /* The outgoing edge for the current COMBO block should already
1419 be correct. Verify this. */
1420 if (combo_bb
->succ
== NULL_EDGE
)
1423 /* There should still be a branch at the end of the THEN or ELSE
1424 blocks taking us to our final destination. */
1425 if (! simplejump_p (combo_bb
->end
)
1426 && ! returnjump_p (combo_bb
->end
))
1430 /* The JOIN block may have had quite a number of other predecessors too.
1431 Since we've already merged the TEST, THEN and ELSE blocks, we should
1432 have only one remaining edge from our if-then-else diamond. If there
1433 is more than one remaining edge, it must come from elsewhere. There
1434 may be zero incoming edges if the THEN block didn't actually join
1435 back up (as with a call to abort). */
1436 else if (join_bb
->pred
== NULL
|| join_bb
->pred
->pred_next
== NULL
)
1438 /* We can merge the JOIN. */
1439 if (combo_bb
->global_live_at_end
)
1440 COPY_REG_SET (combo_bb
->global_live_at_end
,
1441 join_bb
->global_live_at_end
);
1442 merge_blocks_nomove (combo_bb
, join_bb
);
1443 num_removed_blocks
++;
1447 /* We cannot merge the JOIN. */
1449 /* The outgoing edge for the current COMBO block should already
1450 be correct. Verify this. */
1451 if (combo_bb
->succ
->succ_next
!= NULL_EDGE
1452 || combo_bb
->succ
->dest
!= join_bb
)
1455 /* Remove the jump and cruft from the end of the COMBO block. */
1456 tidy_fallthru_edge (combo_bb
->succ
, combo_bb
, join_bb
);
1459 /* Make sure we update life info properly. */
1460 SET_UPDATE_LIFE (combo_bb
);
1462 num_updated_if_blocks
++;
1465 /* Find a block ending in a simple IF condition. Return TRUE if
1466 we were able to transform it in some way. */
/* Given a candidate TEST_BB, check that it ends in a simple two-way
   conditional (exactly two non-complex out-edges), canonicalize the
   fallthru edge as the THEN edge, and try each converter in turn:
   find_if_block, then (without conditional execution, or after reload)
   the one-sided cases find_if_case_1 / find_if_case_2.
   NOTE(review): extraction gaps (success labels/returns missing).  */
1469 find_if_header (test_bb
)
1470 basic_block test_bb
;
1475 /* The kind of block we're looking for has exactly two successors. */
1476 if ((then_edge
= test_bb
->succ
) == NULL_EDGE
1477 || (else_edge
= then_edge
->succ_next
) == NULL_EDGE
1478 || else_edge
->succ_next
!= NULL_EDGE
)
1481 /* Neither edge should be abnormal. */
1482 if ((then_edge
->flags
& EDGE_COMPLEX
)
1483 || (else_edge
->flags
& EDGE_COMPLEX
))
1486 /* The THEN edge is canonically the one that falls through. */
1487 if (then_edge
->flags
& EDGE_FALLTHRU
)
1489 else if (else_edge
->flags
& EDGE_FALLTHRU
)
/* Swap so that THEN is the fallthru edge.  */
1492 else_edge
= then_edge
;
1496 /* Otherwise this must be a multiway branch of some sort. */
1499 if (find_if_block (test_bb
, then_edge
, else_edge
))
1502 && (! HAVE_conditional_execution
|| reload_completed
))
1504 if (find_if_case_1 (test_bb
, then_edge
, else_edge
))
1506 if (find_if_case_2 (test_bb
, then_edge
, else_edge
))
1514 fprintf (rtl_dump_file
, "Conversion succeeded.\n");
1518 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1519 block. If so, we'll try to convert the insns to not require the branch.
1520 Return TRUE if we were successful at converting the block. */
/* Recognize the shape of an IF-THEN or IF-THEN-ELSE region hanging off
   TEST_BB's two out-edges, verify the blocks are numerically adjacent,
   and hand the region to process_if_block.
   NOTE(review): extraction gaps (missing braces, returns, and part of
   the dump format arguments) -- verify against pristine source.  */
1523 find_if_block (test_bb
, then_edge
, else_edge
)
1524 basic_block test_bb
;
1525 edge then_edge
, else_edge
;
1527 basic_block then_bb
= then_edge
->dest
;
1528 basic_block else_bb
= else_edge
->dest
;
1529 basic_block join_bb
= NULL_BLOCK
;
1530 edge then_succ
= then_bb
->succ
;
1531 edge else_succ
= else_bb
->succ
;
1534 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1535 if (then_bb
->pred
->pred_next
!= NULL_EDGE
)
1538 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1539 if (then_succ
!= NULL_EDGE
1540 && (then_succ
->succ_next
!= NULL_EDGE
1541 || (then_succ
->flags
& EDGE_COMPLEX
)))
1544 /* If the THEN block has no successors, conditional execution can still
1545 make a conditional call. Don't do this unless the ELSE block has
1546 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1547 Check for the last insn of the THEN block being an indirect jump, which
1548 is listed as not having any successors, but confuses the rest of the CE
1549 code processing. XXX we should fix this in the future. */
1550 if (then_succ
== NULL
)
1552 if (else_bb
->pred
->pred_next
== NULL_EDGE
)
1554 rtx last_insn
= then_bb
->end
;
/* Skip trailing notes to find the real last insn of THEN.  */
1557 && GET_CODE (last_insn
) == NOTE
1558 && last_insn
!= then_bb
->head
)
1559 last_insn
= PREV_INSN (last_insn
);
/* Reject an indirect jump at the end of THEN (see comment above).  */
1562 && GET_CODE (last_insn
) == JUMP_INSN
1563 && ! simplejump_p (last_insn
))
1567 else_bb
= NULL_BLOCK
;
1573 /* If the THEN block's successor is the other edge out of the TEST block,
1574 then we have an IF-THEN combo without an ELSE. */
1575 else if (then_succ
->dest
== else_bb
)
1578 else_bb
= NULL_BLOCK
;
1581 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
1582 has exactly one predecessor and one successor, and the outgoing edge
1583 is not complex, then we have an IF-THEN-ELSE combo. */
1584 else if (else_succ
!= NULL_EDGE
1585 && then_succ
->dest
== else_succ
->dest
1586 && else_bb
->pred
->pred_next
== NULL_EDGE
1587 && else_succ
->succ_next
== NULL_EDGE
1588 && ! (else_succ
->flags
& EDGE_COMPLEX
))
1589 join_bb
= else_succ
->dest
;
1591 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
1595 num_possible_if_blocks
++;
1600 fprintf (rtl_dump_file
,
1601 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
1602 test_bb
->index
, then_bb
->index
, else_bb
->index
,
1605 fprintf (rtl_dump_file
,
1606 "\nIF-THEN block found, start %d, then %d, join %d\n",
1607 test_bb
->index
, then_bb
->index
, join_bb
->index
);
1610 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we
1611 get the first condition for free, since we've already asserted that
1612 there's a fallthru edge from IF to THEN. */
1613 /* ??? As an enhancement, move the ELSE block. Have to deal with EH and
1614 BLOCK notes, if by no other means than aborting the merge if they
1615 exist. Sticky enough I don't want to think about it now. */
1616 next_index
= then_bb
->index
;
1617 if (else_bb
&& ++next_index
!= else_bb
->index
)
1619 if (++next_index
!= join_bb
->index
)
1627 /* Do the real work. */
1628 return process_if_block (test_bb
, then_bb
, else_bb
, join_bb
);
1631 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
1632 transformable, but not necessarily the other. There need be no
1635 Return TRUE if we were successful at converting the block.
1637 Cases we'd like to look at:
1640 if (test) goto over; // x not live
1648 if (! test) goto label;
1651 if (test) goto E; // x not live
1665 (3) // This one's really only interesting for targets that can do
1666 // multiway branching, e.g. IA-64 BBB bundles. For other targets
1667 // it results in multiple branches on a cache line, which often
1668 // does not sit well with predictors.
1670 if (test1) goto E; // predicted not taken
1686 (A) Don't do (2) if the branch is predicted against the block we're
1687 eliminating. Do it anyway if we can eliminate a branch; this requires
1688 that the sole successor of the eliminated block postdominate the other
1691 (B) With CE, on (3) we can steal from both sides of the if, creating
1700 Again, this is most useful if J postdominates.
1702 (C) CE substitutes for helpful life information.
1704 (D) These heuristics need a lot of work. */
1706 /* Tests for case 1 above. */
/* One-sided case (1): THEN is a small block with a single non-fallthru,
   non-complex exit and a single predecessor, immediately followed by
   ELSE.  If its sets are dead or predicable, redirect TEST_BB's branch
   past it (invert sense: last arg 1 to dead_or_predicable), delete
   THEN, and tidy the fallthru into ELSE.
   NOTE(review): extraction gaps (returns/braces missing).  */
1709 find_if_case_1 (test_bb
, then_edge
, else_edge
)
1710 basic_block test_bb
;
1711 edge then_edge
, else_edge
;
1713 basic_block then_bb
= then_edge
->dest
;
1714 basic_block else_bb
= else_edge
->dest
;
1715 edge then_succ
= then_bb
->succ
;
1718 /* THEN has one successor. */
1719 if (!then_succ
|| then_succ
->succ_next
!= NULL
)
1722 /* THEN does not fall through, but is not strange either. */
1723 if (then_succ
->flags
& (EDGE_COMPLEX
| EDGE_FALLTHRU
))
1726 /* THEN has one predecessor. */
1727 if (then_bb
->pred
->pred_next
!= NULL
)
1730 /* ELSE follows THEN. (??? could be moved) */
1731 if (else_bb
->index
!= then_bb
->index
+ 1)
1734 num_possible_if_blocks
++;
1736 fprintf (rtl_dump_file
,
1737 "\nIF-CASE-1 found, start %d, then %d\n",
1738 test_bb
->index
, then_bb
->index
);
1740 /* THEN is small. */
1741 if (count_bb_insns (then_bb
) > BRANCH_COST
)
1744 /* Find the label for THEN's destination. */
1745 if (then_succ
->dest
== EXIT_BLOCK_PTR
)
1749 new_lab
= JUMP_LABEL (then_bb
->end
);
1754 /* Registers set are dead, or are predicable. */
1755 if (! dead_or_predicable (test_bb
, then_bb
, else_bb
, new_lab
, 1))
1758 /* Conversion went ok, including moving the insns and fixing up the
1759 jump. Adjust the CFG to match. */
/* Life at end of TEST is now (live-in of ELSE) | (live-out of THEN).  */
1761 SET_UPDATE_LIFE (test_bb
);
1762 bitmap_operation (test_bb
->global_live_at_end
,
1763 else_bb
->global_live_at_start
,
1764 then_bb
->global_live_at_end
, BITMAP_IOR
);
1766 make_edge (NULL
, test_bb
, then_succ
->dest
, 0);
1767 flow_delete_block (then_bb
);
1768 tidy_fallthru_edge (else_edge
, test_bb
, else_bb
);
1770 num_removed_blocks
++;
1771 num_updated_if_blocks
++;
1776 /* Test for case 2 above. */
/* One-sided case (2): ELSE is a small block with a single non-complex
   exit and a single predecessor, and either the branch predicts away
   from ELSE or ELSE's successor postdominates THEN.  If ELSE's sets are
   dead or predicable, redirect TEST_BB's branch (same sense: last arg 0)
   and delete ELSE.
   NOTE(review): extraction gaps (returns/braces missing).  */
1779 find_if_case_2 (test_bb
, then_edge
, else_edge
)
1780 basic_block test_bb
;
1781 edge then_edge
, else_edge
;
1783 basic_block then_bb
= then_edge
->dest
;
1784 basic_block else_bb
= else_edge
->dest
;
1785 edge else_succ
= else_bb
->succ
;
1788 /* ELSE has one successor. */
1789 if (!else_succ
|| else_succ
->succ_next
!= NULL
)
1792 /* ELSE outgoing edge is not complex. */
1793 if (else_succ
->flags
& EDGE_COMPLEX
)
1796 /* ELSE has one predecessor. */
1797 if (else_bb
->pred
->pred_next
!= NULL
)
1800 /* THEN is not EXIT. */
1801 if (then_bb
->index
< 0)
1804 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
1805 note
= find_reg_note (test_bb
->end
, REG_BR_PROB
, NULL_RTX
);
1806 if (note
&& INTVAL (XEXP (note
, 0)) >= REG_BR_PROB_BASE
/ 2)
1808 else if (else_succ
->dest
->index
< 0
1809 || TEST_BIT (post_dominators
[ORIG_INDEX (then_bb
)],
1810 ORIG_INDEX (else_succ
->dest
)))
1815 num_possible_if_blocks
++;
1817 fprintf (rtl_dump_file
,
1818 "\nIF-CASE-2 found, start %d, else %d\n",
1819 test_bb
->index
, else_bb
->index
);
1821 /* ELSE is small. */
/* NOTE(review): comment says ELSE, but the code sizes THEN_BB -- this
   looks like it should be count_bb_insns (else_bb); confirm against
   upstream before changing.  */
1822 if (count_bb_insns (then_bb
) > BRANCH_COST
)
1825 /* Find the label for ELSE's destination. */
1826 if (else_succ
->dest
== EXIT_BLOCK_PTR
)
1830 if (else_succ
->flags
& EDGE_FALLTHRU
)
1832 new_lab
= else_succ
->dest
->head
;
1833 if (GET_CODE (new_lab
) != CODE_LABEL
)
1838 new_lab
= JUMP_LABEL (else_bb
->end
);
1844 /* Registers set are dead, or are predicable. */
1845 if (! dead_or_predicable (test_bb
, else_bb
, then_bb
, new_lab
, 0))
1848 /* Conversion went ok, including moving the insns and fixing up the
1849 jump. Adjust the CFG to match. */
/* Life at end of TEST is now (live-in of THEN) | (live-out of ELSE).  */
1851 SET_UPDATE_LIFE (test_bb
);
1852 bitmap_operation (test_bb
->global_live_at_end
,
1853 then_bb
->global_live_at_start
,
1854 else_bb
->global_live_at_end
, BITMAP_IOR
);
1856 remove_edge (else_edge
);
1857 make_edge (NULL
, test_bb
, else_succ
->dest
, 0);
1858 flow_delete_block (else_bb
);
1860 num_removed_blocks
++;
1861 num_updated_if_blocks
++;
1863 /* ??? We may now fallthru from one of THEN's successors into a join
1864 block. Rerun cleanup_cfg? Examine things manually? Wait? */
1869 /* A subroutine of dead_or_predicable called through for_each_rtx.
1870 Return 1 if a memory is found. */
/* for_each_rtx callback: report (nonzero) when the visited rtx *PX is a
   MEM.  DATA is unused; used by dead_or_predicable to reject any merge
   candidate that touches memory.  */
1873 find_memory (px
, data
)
1875 void *data ATTRIBUTE_UNUSED
;
1877 return GET_CODE (*px
) == MEM
;
1880 /* Used by the code above to perform the actual rtl transformations.
1881 Return TRUE if successful.
1883 TEST_BB is the block containing the conditional branch. MERGE_BB
1884 is the block containing the code to manipulate. NEW_DEST is the
1885 label TEST_BB should be branching to after the conversion.
1886 REVERSEP is true if the sense of the branch should be reversed. */
/* Do the actual rtl work for the one-sided cases: either predicate the
   insns of MERGE_BB (conditional-execution targets) or prove every
   register it sets is dead at the branch site (no calls, traps, or
   memory references), then retarget TEST_BB's jump to NEW_DEST
   (inverting it when REVERSEP) and move MERGE_BB's insns before the
   branch.  Returns TRUE on success.
   NOTE(review): extraction gaps throughout (missing braces, returns,
   and some operands) -- verify against pristine source.  */
1889 dead_or_predicable (test_bb
, merge_bb
, other_bb
, new_dest
, reversep
)
1890 basic_block test_bb
, merge_bb
, other_bb
;
1894 rtx head
, end
, jump
, earliest
, old_dest
;
1896 /* No code movement can occur if we'd be scrogging EH regions.
1897 Within MERGE_BB, ensure that we've not got stray EH_BEG or EH_END
1898 notes within the block. Between the blocks, checking that the end
1899 region numbers match ensures that we won't disrupt the nesting
1901 if (merge_bb
->eh_beg
!= merge_bb
->eh_end
1902 || merge_bb
->eh_end
!= test_bb
->eh_end
)
1905 jump
= test_bb
->end
;
1907 /* Find the extent of the real code in the merge block. */
1908 head
= merge_bb
->head
;
1909 end
= merge_bb
->end
;
/* Strip the leading label/note and any trailing jump from the range.  */
1911 if (GET_CODE (head
) == CODE_LABEL
)
1912 head
= NEXT_INSN (head
);
1913 if (GET_CODE (head
) == NOTE
)
1917 head
= end
= NULL_RTX
;
1920 head
= NEXT_INSN (head
);
1923 if (GET_CODE (end
) == JUMP_INSN
)
1927 head
= end
= NULL_RTX
;
1930 end
= PREV_INSN (end
);
1933 /* Disable handling dead code by conditional execution if the machine needs
1934 to do anything funny with the tests, etc. */
1935 #ifndef IFCVT_MODIFY_TESTS
1936 if (HAVE_conditional_execution
)
1938 /* In the conditional execution case, we have things easy. We know
1939 the condition is reversible. We don't have to check life info,
1940 because we're going to conditionally execute the code anyway.
1941 All that's left is making sure the insns involved can actually
1946 cond
= cond_exec_get_condition (jump
);
1948 prob_val
= find_reg_note (jump
, REG_BR_PROB
, NULL_RTX
);
1950 prob_val
= XEXP (prob_val
, 0);
/* When reversing, invert both the condition and the probability.  */
1954 cond
= gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond
)),
1955 GET_MODE (cond
), XEXP (cond
, 0),
1958 prob_val
= GEN_INT (REG_BR_PROB_BASE
- INTVAL (prob_val
));
1961 if (! cond_exec_process_insns (head
, end
, cond
, prob_val
, 0))
1969 /* In the non-conditional execution case, we have to verify that there
1970 are no trapping operations, no calls, no references to memory, and
1971 that any registers modified are dead at the branch site. */
1973 rtx insn
, cond
, prev
;
1974 regset_head merge_set_head
, tmp_head
, test_live_head
, test_set_head
;
1975 regset merge_set
, tmp
, test_live
, test_set
;
1976 struct propagate_block_info
*pbi
;
1979 /* Check for no calls or trapping operations. */
1980 for (insn
= head
; ; insn
= NEXT_INSN (insn
))
1982 if (GET_CODE (insn
) == CALL_INSN
)
1986 if (may_trap_p (PATTERN (insn
)))
1989 /* ??? Even non-trapping memories such as stack frame
1990 references must be avoided. For stores, we collect
1991 no lifetime info; for reads, we'd have to assert
1992 true_dependence false against every store in the
1994 if (for_each_rtx (&PATTERN (insn
), find_memory
, NULL
))
2001 if (! any_condjump_p (jump
))
2004 /* Find the extent of the conditional. */
2005 cond
= noce_get_condition (jump
, &earliest
);
2010 MERGE_SET = set of registers set in MERGE_BB
2011 TEST_LIVE = set of registers live at EARLIEST
2012 TEST_SET = set of registers set between EARLIEST and the
2013 end of the block. */
2015 tmp
= INITIALIZE_REG_SET (tmp_head
);
2016 merge_set
= INITIALIZE_REG_SET (merge_set_head
);
2017 test_live
= INITIALIZE_REG_SET (test_live_head
);
2018 test_set
= INITIALIZE_REG_SET (test_set_head
);
2020 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2021 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2022 since we've already asserted that MERGE_BB is small. */
2023 propagate_block (merge_bb
, tmp
, merge_set
, merge_set
, 0);
2025 /* For small register class machines, don't lengthen lifetimes of
2026 hard registers before reload. */
2027 if (SMALL_REGISTER_CLASSES
&& ! reload_completed
)
2029 EXECUTE_IF_SET_IN_BITMAP
2032 if (i
< FIRST_PSEUDO_REGISTER
2034 && ! global_regs
[i
])
2039 /* For TEST, we're interested in a range of insns, not a whole block.
2040 Moreover, we're interested in the insns live from OTHER_BB. */
2042 COPY_REG_SET (test_live
, other_bb
->global_live_at_start
);
2043 pbi
= init_propagate_block_info (test_bb
, test_live
, test_set
, test_set
,
/* Walk backward from the jump to EARLIEST, accumulating life info.  */
2046 for (insn
= jump
; ; insn
= prev
)
2048 prev
= propagate_one_insn (pbi
, insn
);
2049 if (insn
== earliest
)
2053 free_propagate_block_info (pbi
);
2055 /* We can perform the transformation if
2056 MERGE_SET & (TEST_SET | TEST_LIVE)
2058 TEST_SET & merge_bb->global_live_at_start
2061 bitmap_operation (tmp
, test_set
, test_live
, BITMAP_IOR
);
2062 bitmap_operation (tmp
, tmp
, merge_set
, BITMAP_AND
);
2063 EXECUTE_IF_SET_IN_BITMAP(tmp
, 0, i
, fail
= 1);
2065 bitmap_operation (tmp
, test_set
, merge_bb
->global_live_at_start
,
2067 EXECUTE_IF_SET_IN_BITMAP(tmp
, 0, i
, fail
= 1);
2070 FREE_REG_SET (merge_set
);
2071 FREE_REG_SET (test_live
);
2072 FREE_REG_SET (test_set
);
2079 /* We don't want to use normal invert_jump or redirect_jump because
2080 we don't want to delete_insn called. Also, we want to do our own
2081 change group management. */
2083 old_dest
= JUMP_LABEL (jump
);
2085 ? ! invert_jump_1 (jump
, new_dest
)
2086 : ! redirect_jump_1 (jump
, new_dest
))
2089 if (! apply_change_group ())
/* Keep label reference counts and JUMP_LABEL in sync by hand.  */
2093 LABEL_NUSES (old_dest
) -= 1;
2095 LABEL_NUSES (new_dest
) += 1;
2096 JUMP_LABEL (jump
) = new_dest
;
/* An inverted jump means the recorded branch probability flips too.  */
2100 rtx note
= find_reg_note (jump
, REG_BR_PROB
, NULL_RTX
);
2102 XEXP (note
, 0) = GEN_INT (REG_BR_PROB_BASE
- INTVAL (XEXP (note
, 0)));
2105 /* Move the insns out of MERGE_BB to before the branch. */
2108 if (end
== merge_bb
->end
)
2109 merge_bb
->end
= PREV_INSN (head
);
2111 head
= squeeze_notes (head
, end
);
2112 if (GET_CODE (end
) == NOTE
2113 && (NOTE_LINE_NUMBER (end
) == NOTE_INSN_BLOCK_END
2114 || NOTE_LINE_NUMBER (end
) == NOTE_INSN_BLOCK_BEG
2115 || NOTE_LINE_NUMBER (end
) == NOTE_INSN_LOOP_BEG
2116 || NOTE_LINE_NUMBER (end
) == NOTE_INSN_LOOP_END
2117 || NOTE_LINE_NUMBER (end
) == NOTE_INSN_LOOP_CONT
2118 || NOTE_LINE_NUMBER (end
) == NOTE_INSN_LOOP_VTOP
))
2122 end
= PREV_INSN (end
);
2125 reorder_insns (head
, end
, PREV_INSN (earliest
));
2134 /* Main entry point for all if-conversion. */
2137 if_convert (life_data_ok
)
2142 num_possible_if_blocks
= 0;
2143 num_updated_if_blocks
= 0;
2144 num_removed_blocks
= 0;
2146 /* Free up basic_block_for_insn so that we don't have to keep it
2147 up to date, either here or in merge_blocks_nomove. */
2148 free_basic_block_vars (1);
2150 /* Compute postdominators if we think we'll use them. */
2151 post_dominators
= NULL
;
2152 if (HAVE_conditional_execution
|| life_data_ok
)
2154 post_dominators
= sbitmap_vector_alloc (n_basic_blocks
, n_basic_blocks
);
2155 calculate_dominance_info (NULL
, post_dominators
, CDI_POST_DOMINATORS
);
2158 /* Record initial block numbers. */
2159 for (block_num
= 0; block_num
< n_basic_blocks
; block_num
++)
2160 SET_ORIG_INDEX (BASIC_BLOCK (block_num
), block_num
);
2162 /* Go through each of the basic blocks looking for things to convert. */
2163 for (block_num
= 0; block_num
< n_basic_blocks
; )
2165 basic_block bb
= BASIC_BLOCK (block_num
);
2166 if (find_if_header (bb
))
2167 block_num
= bb
->index
;
2172 if (post_dominators
)
2173 sbitmap_vector_free (post_dominators
);
2176 fflush (rtl_dump_file
);
2178 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2179 compute_bb_for_insn (get_max_uid ());
2181 /* Rebuild life info for basic blocks that require it. */
2182 if (num_removed_blocks
&& life_data_ok
)
2184 sbitmap update_life_blocks
= sbitmap_alloc (n_basic_blocks
);
2185 sbitmap_zero (update_life_blocks
);
2187 /* If we allocated new pseudos, we must resize the array for sched1. */
2188 if (max_regno
< max_reg_num ())
2190 max_regno
= max_reg_num ();
2191 allocate_reg_info (max_regno
, FALSE
, FALSE
);
2194 for (block_num
= 0; block_num
< n_basic_blocks
; block_num
++)
2195 if (UPDATE_LIFE (BASIC_BLOCK (block_num
)))
2196 SET_BIT (update_life_blocks
, block_num
);
2198 count_or_remove_death_notes (update_life_blocks
, 1);
2199 /* ??? See about adding a mode that verifies that the initial
2200 set of blocks don't let registers come live. */
2201 update_life_info (update_life_blocks
, UPDATE_LIFE_GLOBAL
,
2202 PROP_DEATH_NOTES
| PROP_SCAN_DEAD_CODE
2203 | PROP_KILL_DEAD_CODE
);
2205 sbitmap_free (update_life_blocks
);
2208 /* Write the final stats. */
2209 if (rtl_dump_file
&& num_possible_if_blocks
> 0)
2211 fprintf (rtl_dump_file
,
2212 "\n%d possible IF blocks searched.\n",
2213 num_possible_if_blocks
);
2214 fprintf (rtl_dump_file
,
2215 "%d IF blocks converted.\n",
2216 num_updated_if_blocks
);
2217 fprintf (rtl_dump_file
,
2218 "%d basic blocks deleted.\n\n\n",
2219 num_removed_blocks
);
2222 #ifdef ENABLE_CHECKING
2223 verify_flow_info ();