/* If-conversion support.
   Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the Free
   Software Foundation, 59 Temple Place - Suite 330, Boston, MA
   02111-1307, USA.  */
#include "coretypes.h"
#include "insn-config.h"
#include "hard-reg-set.h"
#include "basic-block.h"

#ifndef HAVE_conditional_execution
#define HAVE_conditional_execution 0
#endif
#ifndef HAVE_conditional_move
#define HAVE_conditional_move 0
#endif
#ifndef HAVE_conditional_trap
#define HAVE_conditional_trap 0
#endif
#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE   (BRANCH_COST + 1)
#endif

#define NULL_EDGE   ((struct edge_def *)NULL)
#define NULL_BLOCK  ((struct basic_block_def *)NULL)
/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made which require life information to be updated.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;

/* True if life data is ok at present.  */
static bool life_data_ok;
/* Forward references.  */
static int count_bb_insns (basic_block);
static int total_bb_rtx_cost (basic_block);
static rtx first_active_insn (basic_block);
static rtx last_active_insn (basic_block, int);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
static rtx cond_exec_get_condition (rtx);
static int cond_exec_process_if_block (ce_if_block_t *, int);
static rtx noce_get_condition (rtx, rtx *);
static int noce_operand_ok (rtx);
static int noce_process_if_block (ce_if_block_t *);
static int process_if_block (ce_if_block_t *);
static void merge_if_block (ce_if_block_t *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
static int block_jumps_and_fallthru_p (basic_block, basic_block);
static int find_if_block (ce_if_block_t *);
static int find_if_case_1 (basic_block, edge, edge);
static int find_if_case_2 (basic_block, edge, edge);
static int find_memory (rtx *, void *);
static int dead_or_predicable (basic_block, basic_block, basic_block,
                               basic_block, int);
static void noce_emit_move_insn (rtx, rtx);
static rtx block_has_only_trap (basic_block);
static void mark_loop_exit_edges (void);
/* Sets EDGE_LOOP_EXIT flag for all loop exits.  */

static void
mark_loop_exit_edges (void)
{
  struct loops loops;
  basic_block bb;
  edge e;

  flow_loops_find (&loops, LOOP_TREE);
  free_dominance_info (CDI_DOMINATORS);

  if (loops.num > 1)
    FOR_EACH_BB (bb)
      {
        for (e = bb->succ; e; e = e->succ_next)
          if (find_common_loop (bb->loop_father, e->dest->loop_father)
              != bb->loop_father)
            e->flags |= EDGE_LOOP_EXIT;
          else
            e->flags &= ~EDGE_LOOP_EXIT;
      }

  flow_loops_free (&loops);
}
/* Count the number of non-jump active insns in BB.  */

static int
count_bb_insns (basic_block bb)
{
  int count = 0;
  rtx insn = BB_HEAD (bb);

  while (1)
    {
      if (CALL_P (insn) || NONJUMP_INSN_P (insn))
        count++;

      if (insn == BB_END (bb))
        break;
      insn = NEXT_INSN (insn);
    }

  return count;
}
/* Count the total insn_rtx_cost of non-jump active insns in BB.
   This function returns -1 if the cost of any instruction could
   not be estimated.  */

static int
total_bb_rtx_cost (basic_block bb)
{
  int count = 0;
  rtx insn = BB_HEAD (bb);

  while (1)
    {
      if (NONJUMP_INSN_P (insn))
        {
          int cost = insn_rtx_cost (PATTERN (insn));
          if (cost == 0)
            return -1;
          count += cost;
        }
      else if (CALL_P (insn))
        return -1;

      if (insn == BB_END (bb))
        break;
      insn = NEXT_INSN (insn);
    }

  return count;
}
/* Return the first non-jump active insn in the basic block.  */

static rtx
first_active_insn (basic_block bb)
{
  rtx insn = BB_HEAD (bb);

  if (LABEL_P (insn))
    {
      if (insn == BB_END (bb))
        return NULL_RTX;
      insn = NEXT_INSN (insn);
    }

  while (NOTE_P (insn))
    {
      if (insn == BB_END (bb))
        return NULL_RTX;
      insn = NEXT_INSN (insn);
    }

  if (JUMP_P (insn))
    return NULL_RTX;

  return insn;
}
/* Return the last non-jump active insn in the basic block.  */

static rtx
last_active_insn (basic_block bb, int skip_use_p)
{
  rtx insn = BB_END (bb);
  rtx head = BB_HEAD (bb);

  while (NOTE_P (insn)
         || JUMP_P (insn)
         || (skip_use_p
             && NONJUMP_INSN_P (insn)
             && GET_CODE (PATTERN (insn)) == USE))
    {
      if (insn == head)
        return NULL_RTX;
      insn = PREV_INSN (insn);
    }

  if (LABEL_P (insn))
    return NULL_RTX;

  return insn;
}
/* Return the basic block reached by falling through the basic block BB.  */

static basic_block
block_fallthru (basic_block bb)
{
  edge e;

  for (e = bb->succ;
       e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
       e = e->succ_next)
    ;

  return (e) ? e->dest : NULL_BLOCK;
}
/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.  */
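/* For illustration only (the register numbers and CC mode below are
   invented for this sketch, not taken from any target): predicating an
   insn wraps its pattern in a COND_EXEC carrying the test, e.g.

       (set (reg:SI 100) (reg:SI 101))

   becomes

       (cond_exec (ne (reg:CC 17) (const_int 0))
                  (set (reg:SI 100) (reg:SI 101)))

   so the move only takes effect when the condition holds.  */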
266 cond_exec_process_insns (ce_if_block_t
*ce_info ATTRIBUTE_UNUSED
,
267 /* if block information */rtx start
,
268 /* first insn to look at */rtx end
,
269 /* last insn to look at */rtx test
,
270 /* conditional execution test */rtx prob_val
,
271 /* probability of branch taken. */int mod_ok
)
273 int must_be_last
= FALSE
;
281 for (insn
= start
; ; insn
= NEXT_INSN (insn
))
286 if (!NONJUMP_INSN_P (insn
) && !CALL_P (insn
))
289 /* Remove USE insns that get in the way. */
290 if (reload_completed
&& GET_CODE (PATTERN (insn
)) == USE
)
292 /* ??? Ug. Actually unlinking the thing is problematic,
293 given what we'd have to coordinate with our callers. */
294 SET_INSN_DELETED (insn
);
298 /* Last insn wasn't last? */
302 if (modified_in_p (test
, insn
))
309 /* Now build the conditional form of the instruction. */
310 pattern
= PATTERN (insn
);
311 xtest
= copy_rtx (test
);
313 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
315 if (GET_CODE (pattern
) == COND_EXEC
)
317 if (GET_MODE (xtest
) != GET_MODE (COND_EXEC_TEST (pattern
)))
320 xtest
= gen_rtx_AND (GET_MODE (xtest
), xtest
,
321 COND_EXEC_TEST (pattern
));
322 pattern
= COND_EXEC_CODE (pattern
);
325 pattern
= gen_rtx_COND_EXEC (VOIDmode
, xtest
, pattern
);
327 /* If the machine needs to modify the insn being conditionally executed,
328 say for example to force a constant integer operand into a temp
329 register, do so here. */
330 #ifdef IFCVT_MODIFY_INSN
331 IFCVT_MODIFY_INSN (ce_info
, pattern
, insn
);
336 validate_change (insn
, &PATTERN (insn
), pattern
, 1);
338 if (CALL_P (insn
) && prob_val
)
339 validate_change (insn
, ®_NOTES (insn
),
340 alloc_EXPR_LIST (REG_BR_PROB
, prob_val
,
341 REG_NOTES (insn
)), 1);
/* Return the condition for a jump.  Do not do any special processing.  */

static rtx
cond_exec_get_condition (rtx jump)
{
  rtx test_if, cond;

  if (any_condjump_p (jump))
    test_if = SET_SRC (pc_set (jump));
  else
    return NULL_RTX;
  cond = XEXP (test_if, 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
      && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
    {
      enum rtx_code rev = reversed_comparison_code (cond, jump);
      if (rev == UNKNOWN)
        return NULL_RTX;

      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
                             XEXP (cond, 1));
    }

  return cond;
}
380 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
381 to conditional execution. Return TRUE if we were successful at
382 converting the block. */
385 cond_exec_process_if_block (ce_if_block_t
* ce_info
,
386 /* if block information */int do_multiple_p
)
388 basic_block test_bb
= ce_info
->test_bb
; /* last test block */
389 basic_block then_bb
= ce_info
->then_bb
; /* THEN */
390 basic_block else_bb
= ce_info
->else_bb
; /* ELSE or NULL */
391 rtx test_expr
; /* expression in IF_THEN_ELSE that is tested */
392 rtx then_start
; /* first insn in THEN block */
393 rtx then_end
; /* last insn + 1 in THEN block */
394 rtx else_start
= NULL_RTX
; /* first insn in ELSE block or NULL */
395 rtx else_end
= NULL_RTX
; /* last insn + 1 in ELSE block */
396 int max
; /* max # of insns to convert. */
397 int then_mod_ok
; /* whether conditional mods are ok in THEN */
398 rtx true_expr
; /* test for else block insns */
399 rtx false_expr
; /* test for then block insns */
400 rtx true_prob_val
; /* probability of else block */
401 rtx false_prob_val
; /* probability of then block */
403 enum rtx_code false_code
;
405 /* If test is comprised of && or || elements, and we've failed at handling
406 all of them together, just use the last test if it is the special case of
407 && elements without an ELSE block. */
408 if (!do_multiple_p
&& ce_info
->num_multiple_test_blocks
)
410 if (else_bb
|| ! ce_info
->and_and_p
)
413 ce_info
->test_bb
= test_bb
= ce_info
->last_test_bb
;
414 ce_info
->num_multiple_test_blocks
= 0;
415 ce_info
->num_and_and_blocks
= 0;
416 ce_info
->num_or_or_blocks
= 0;
419 /* Find the conditional jump to the ELSE or JOIN part, and isolate
421 test_expr
= cond_exec_get_condition (BB_END (test_bb
));
425 /* If the conditional jump is more than just a conditional jump,
426 then we can not do conditional execution conversion on this block. */
427 if (! onlyjump_p (BB_END (test_bb
)))
430 /* Collect the bounds of where we're to search, skipping any labels, jumps
431 and notes at the beginning and end of the block. Then count the total
432 number of insns and see if it is small enough to convert. */
433 then_start
= first_active_insn (then_bb
);
434 then_end
= last_active_insn (then_bb
, TRUE
);
435 n_insns
= ce_info
->num_then_insns
= count_bb_insns (then_bb
);
436 max
= MAX_CONDITIONAL_EXECUTE
;
441 else_start
= first_active_insn (else_bb
);
442 else_end
= last_active_insn (else_bb
, TRUE
);
443 n_insns
+= ce_info
->num_else_insns
= count_bb_insns (else_bb
);
449 /* Map test_expr/test_jump into the appropriate MD tests to use on
450 the conditionally executed code. */
452 true_expr
= test_expr
;
454 false_code
= reversed_comparison_code (true_expr
, BB_END (test_bb
));
455 if (false_code
!= UNKNOWN
)
456 false_expr
= gen_rtx_fmt_ee (false_code
, GET_MODE (true_expr
),
457 XEXP (true_expr
, 0), XEXP (true_expr
, 1));
459 false_expr
= NULL_RTX
;
461 #ifdef IFCVT_MODIFY_TESTS
462 /* If the machine description needs to modify the tests, such as setting a
463 conditional execution register from a comparison, it can do so here. */
464 IFCVT_MODIFY_TESTS (ce_info
, true_expr
, false_expr
);
466 /* See if the conversion failed. */
467 if (!true_expr
|| !false_expr
)
471 true_prob_val
= find_reg_note (BB_END (test_bb
), REG_BR_PROB
, NULL_RTX
);
474 true_prob_val
= XEXP (true_prob_val
, 0);
475 false_prob_val
= GEN_INT (REG_BR_PROB_BASE
- INTVAL (true_prob_val
));
478 false_prob_val
= NULL_RTX
;
480 /* If we have && or || tests, do them here. These tests are in the adjacent
481 blocks after the first block containing the test. */
482 if (ce_info
->num_multiple_test_blocks
> 0)
484 basic_block bb
= test_bb
;
485 basic_block last_test_bb
= ce_info
->last_test_bb
;
495 bb
= block_fallthru (bb
);
496 start
= first_active_insn (bb
);
497 end
= last_active_insn (bb
, TRUE
);
499 && ! cond_exec_process_insns (ce_info
, start
, end
, false_expr
,
500 false_prob_val
, FALSE
))
503 /* If the conditional jump is more than just a conditional jump, then
504 we can not do conditional execution conversion on this block. */
505 if (! onlyjump_p (BB_END (bb
)))
508 /* Find the conditional jump and isolate the test. */
509 t
= cond_exec_get_condition (BB_END (bb
));
513 f
= gen_rtx_fmt_ee (reverse_condition (GET_CODE (t
)),
518 if (ce_info
->and_and_p
)
520 t
= gen_rtx_AND (GET_MODE (t
), true_expr
, t
);
521 f
= gen_rtx_IOR (GET_MODE (t
), false_expr
, f
);
525 t
= gen_rtx_IOR (GET_MODE (t
), true_expr
, t
);
526 f
= gen_rtx_AND (GET_MODE (t
), false_expr
, f
);
529 /* If the machine description needs to modify the tests, such as
530 setting a conditional execution register from a comparison, it can
532 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
533 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info
, bb
, t
, f
);
535 /* See if the conversion failed. */
543 while (bb
!= last_test_bb
);
/* For IF-THEN-ELSE blocks, we don't allow modifications of the test
   in the THEN block.  */
548 then_mod_ok
= (else_bb
== NULL_BLOCK
);
550 /* Go through the THEN and ELSE blocks converting the insns if possible
551 to conditional execution. */
555 || ! cond_exec_process_insns (ce_info
, then_start
, then_end
,
556 false_expr
, false_prob_val
,
560 if (else_bb
&& else_end
561 && ! cond_exec_process_insns (ce_info
, else_start
, else_end
,
562 true_expr
, true_prob_val
, TRUE
))
565 /* If we cannot apply the changes, fail. Do not go through the normal fail
566 processing, since apply_change_group will call cancel_changes. */
567 if (! apply_change_group ())
569 #ifdef IFCVT_MODIFY_CANCEL
570 /* Cancel any machine dependent changes. */
571 IFCVT_MODIFY_CANCEL (ce_info
);
576 #ifdef IFCVT_MODIFY_FINAL
577 /* Do any machine dependent final modifications. */
578 IFCVT_MODIFY_FINAL (ce_info
);
581 /* Conversion succeeded. */
583 fprintf (dump_file
, "%d insn%s converted to conditional execution.\n",
584 n_insns
, (n_insns
== 1) ? " was" : "s were");
586 /* Merge the blocks! */
587 merge_if_block (ce_info
);
588 cond_exec_changed_p
= TRUE
;
592 #ifdef IFCVT_MODIFY_CANCEL
593 /* Cancel any machine dependent changes. */
594 IFCVT_MODIFY_CANCEL (ce_info
);
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  basic_block test_bb;
  rtx insn_a, insn_b;
  rtx x, a, b;
  rtx jump, cond, cond_earliest;
  /* True if "b" was originally evaluated unconditionally.  */
  bool b_unconditional;
};

static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
static int noce_try_move (struct noce_if_info *);
static int noce_try_store_flag (struct noce_if_info *);
static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
                            rtx, rtx, rtx);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
static int noce_try_minmax (struct noce_if_info *);
static int noce_try_abs (struct noce_if_info *);
static int noce_try_sign_mask (struct noce_if_info *);

/* Helper function for noce_try_store_flag*.  */
635 noce_emit_store_flag (struct noce_if_info
*if_info
, rtx x
, int reversep
,
638 rtx cond
= if_info
->cond
;
642 cond_complex
= (! general_operand (XEXP (cond
, 0), VOIDmode
)
643 || ! general_operand (XEXP (cond
, 1), VOIDmode
));
645 /* If earliest == jump, or when the condition is complex, try to
646 build the store_flag insn directly. */
649 cond
= XEXP (SET_SRC (pc_set (if_info
->jump
)), 0);
652 code
= reversed_comparison_code (cond
, if_info
->jump
);
654 code
= GET_CODE (cond
);
656 if ((if_info
->cond_earliest
== if_info
->jump
|| cond_complex
)
657 && (normalize
== 0 || STORE_FLAG_VALUE
== normalize
))
661 tmp
= gen_rtx_fmt_ee (code
, GET_MODE (x
), XEXP (cond
, 0),
663 tmp
= gen_rtx_SET (VOIDmode
, x
, tmp
);
666 tmp
= emit_insn (tmp
);
668 if (recog_memoized (tmp
) >= 0)
674 if_info
->cond_earliest
= if_info
->jump
;
682 /* Don't even try if the comparison operands or the mode of X are weird. */
683 if (cond_complex
|| !SCALAR_INT_MODE_P (GET_MODE (x
)))
686 return emit_store_flag (x
, code
, XEXP (cond
, 0),
687 XEXP (cond
, 1), VOIDmode
,
688 (code
== LTU
|| code
== LEU
689 || code
== GEU
|| code
== GTU
), normalize
);
/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  enum machine_mode outmode, inmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      emit_move_insn (x, y);
      return;
    }

  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  inmode = GET_MODE (inner);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y);
}
/* Return the sequence of instructions generated by if conversion.  This
   function calls end_sequence() to end the current stream, and ensures
   that all instructions are unshared, recognizable non-jump insns.
   On failure, this function returns a NULL_RTX.  */

static rtx
end_ifcvt_sequence (struct noce_if_info *if_info)
{
  rtx insn;
  rtx seq = get_insns ();

  set_used_flags (if_info->x);
  set_used_flags (if_info->cond);
  unshare_all_rtl_in_chain (seq);
  end_sequence ();

  /* Make sure that all of the instructions emitted are recognizable,
     and that we haven't introduced a new jump instruction.
     As an exercise for the reader, build a general mechanism that
     allows proper placement of required clobbers.  */
  for (insn = seq; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
        || recog_memoized (insn) == -1)
      return NULL_RTX;

  return seq;
}
744 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
745 "if (a == b) x = a; else x = b" into "x = b". */
static int
noce_try_move (struct noce_if_info *if_info)
{
  rtx cond = if_info->cond;
  enum rtx_code code = GET_CODE (cond);
  rtx y, seq;

  if (code != NE && code != EQ)
    return FALSE;

  /* This optimization isn't valid if either A or B could be a NaN
     or a signed zero.  */
  if (HONOR_NANS (GET_MODE (if_info->x))
      || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
    return FALSE;

  /* Check whether the operands of the comparison are A and B,
     in either order.  */
  if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
       && rtx_equal_p (if_info->b, XEXP (cond, 1)))
      || (rtx_equal_p (if_info->a, XEXP (cond, 1))
          && rtx_equal_p (if_info->b, XEXP (cond, 0))))
    {
      y = (code == EQ) ? if_info->a : if_info->b;

      /* Avoid generating the move if the source is the destination.  */
      if (! rtx_equal_p (if_info->x, y))
        {
          start_sequence ();
          noce_emit_move_insn (if_info->x, y);
          seq = end_ifcvt_sequence (if_info);
          if (!seq)
            return FALSE;

          emit_insn_before_setloc (seq, if_info->jump,
                                   INSN_LOCATOR (if_info->insn_a));
        }
      return TRUE;
    }

  return FALSE;
}
789 /* Convert "if (test) x = 1; else x = 0".
791 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
792 tried in noce_try_store_flag_constants after noce_try_cmove has had
793 a go at the conversion. */
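/* Sketch of the intended effect at the C level (generic names; assumes
   a target whose STORE_FLAG_VALUE is 1):

       if (test) x = 1; else x = 0;
   =>
       x = (test != 0);

   i.e. the store-flag result of the comparison is copied into X with no
   branch at all.  */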
796 noce_try_store_flag (struct noce_if_info
*if_info
)
801 if (GET_CODE (if_info
->b
) == CONST_INT
802 && INTVAL (if_info
->b
) == STORE_FLAG_VALUE
803 && if_info
->a
== const0_rtx
)
805 else if (if_info
->b
== const0_rtx
806 && GET_CODE (if_info
->a
) == CONST_INT
807 && INTVAL (if_info
->a
) == STORE_FLAG_VALUE
808 && (reversed_comparison_code (if_info
->cond
, if_info
->jump
)
816 target
= noce_emit_store_flag (if_info
, if_info
->x
, reversep
, 0);
819 if (target
!= if_info
->x
)
820 noce_emit_move_insn (if_info
->x
, target
);
822 seq
= end_ifcvt_sequence (if_info
);
826 emit_insn_before_setloc (seq
, if_info
->jump
,
827 INSN_LOCATOR (if_info
->insn_a
));
837 /* Convert "if (test) x = a; else x = b", for A and B constant. */
840 noce_try_store_flag_constants (struct noce_if_info
*if_info
)
844 HOST_WIDE_INT itrue
, ifalse
, diff
, tmp
;
845 int normalize
, can_reverse
;
846 enum machine_mode mode
;
849 && GET_CODE (if_info
->a
) == CONST_INT
850 && GET_CODE (if_info
->b
) == CONST_INT
)
852 mode
= GET_MODE (if_info
->x
);
853 ifalse
= INTVAL (if_info
->a
);
854 itrue
= INTVAL (if_info
->b
);
856 /* Make sure we can represent the difference between the two values. */
857 if ((itrue
- ifalse
> 0)
858 != ((ifalse
< 0) != (itrue
< 0) ? ifalse
< 0 : ifalse
< itrue
))
861 diff
= trunc_int_for_mode (itrue
- ifalse
, mode
);
863 can_reverse
= (reversed_comparison_code (if_info
->cond
, if_info
->jump
)
867 if (diff
== STORE_FLAG_VALUE
|| diff
== -STORE_FLAG_VALUE
)
869 else if (ifalse
== 0 && exact_log2 (itrue
) >= 0
870 && (STORE_FLAG_VALUE
== 1
871 || BRANCH_COST
>= 2))
873 else if (itrue
== 0 && exact_log2 (ifalse
) >= 0 && can_reverse
874 && (STORE_FLAG_VALUE
== 1 || BRANCH_COST
>= 2))
875 normalize
= 1, reversep
= 1;
877 && (STORE_FLAG_VALUE
== -1
878 || BRANCH_COST
>= 2))
880 else if (ifalse
== -1 && can_reverse
881 && (STORE_FLAG_VALUE
== -1 || BRANCH_COST
>= 2))
882 normalize
= -1, reversep
= 1;
883 else if ((BRANCH_COST
>= 2 && STORE_FLAG_VALUE
== -1)
891 tmp
= itrue
; itrue
= ifalse
; ifalse
= tmp
;
892 diff
= trunc_int_for_mode (-diff
, mode
);
896 target
= noce_emit_store_flag (if_info
, if_info
->x
, reversep
, normalize
);
903 /* if (test) x = 3; else x = 4;
904 => x = 3 + (test == 0); */
905 if (diff
== STORE_FLAG_VALUE
|| diff
== -STORE_FLAG_VALUE
)
907 target
= expand_simple_binop (mode
,
908 (diff
== STORE_FLAG_VALUE
910 GEN_INT (ifalse
), target
, if_info
->x
, 0,
914 /* if (test) x = 8; else x = 0;
915 => x = (test != 0) << 3; */
916 else if (ifalse
== 0 && (tmp
= exact_log2 (itrue
)) >= 0)
918 target
= expand_simple_binop (mode
, ASHIFT
,
919 target
, GEN_INT (tmp
), if_info
->x
, 0,
923 /* if (test) x = -1; else x = b;
924 => x = -(test != 0) | b; */
925 else if (itrue
== -1)
927 target
= expand_simple_binop (mode
, IOR
,
928 target
, GEN_INT (ifalse
), if_info
->x
, 0,
932 /* if (test) x = a; else x = b;
933 => x = (-(test != 0) & (b - a)) + a; */
936 target
= expand_simple_binop (mode
, AND
,
937 target
, GEN_INT (diff
), if_info
->x
, 0,
940 target
= expand_simple_binop (mode
, PLUS
,
941 target
, GEN_INT (ifalse
),
942 if_info
->x
, 0, OPTAB_WIDEN
);
951 if (target
!= if_info
->x
)
952 noce_emit_move_insn (if_info
->x
, target
);
954 seq
= end_ifcvt_sequence (if_info
);
958 emit_insn_before_setloc (seq
, if_info
->jump
,
959 INSN_LOCATOR (if_info
->insn_a
));
966 /* Convert "if (test) foo++" into "foo += (test != 0)", and
967 similarly for "foo--". */
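/* Sketch (generic names):

       if (test) foo++;
   =>
       foo += (test != 0);

   Either a conditional-add (addcc) pattern is used directly, or the
   store-flag value of the test is materialized and added to FOO.  */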
970 noce_try_addcc (struct noce_if_info
*if_info
)
973 int subtract
, normalize
;
976 && GET_CODE (if_info
->a
) == PLUS
977 && rtx_equal_p (XEXP (if_info
->a
, 0), if_info
->b
)
978 && (reversed_comparison_code (if_info
->cond
, if_info
->jump
)
981 rtx cond
= if_info
->cond
;
982 enum rtx_code code
= reversed_comparison_code (cond
, if_info
->jump
);
984 /* First try to use addcc pattern. */
985 if (general_operand (XEXP (cond
, 0), VOIDmode
)
986 && general_operand (XEXP (cond
, 1), VOIDmode
))
989 target
= emit_conditional_add (if_info
->x
, code
,
994 XEXP (if_info
->a
, 1),
995 GET_MODE (if_info
->x
),
996 (code
== LTU
|| code
== GEU
997 || code
== LEU
|| code
== GTU
));
1000 if (target
!= if_info
->x
)
1001 noce_emit_move_insn (if_info
->x
, target
);
1003 seq
= end_ifcvt_sequence (if_info
);
1007 emit_insn_before_setloc (seq
, if_info
->jump
,
1008 INSN_LOCATOR (if_info
->insn_a
));
1014 /* If that fails, construct conditional increment or decrement using
1016 if (BRANCH_COST
>= 2
1017 && (XEXP (if_info
->a
, 1) == const1_rtx
1018 || XEXP (if_info
->a
, 1) == constm1_rtx
))
1021 if (STORE_FLAG_VALUE
== INTVAL (XEXP (if_info
->a
, 1)))
1022 subtract
= 0, normalize
= 0;
1023 else if (-STORE_FLAG_VALUE
== INTVAL (XEXP (if_info
->a
, 1)))
1024 subtract
= 1, normalize
= 0;
1026 subtract
= 0, normalize
= INTVAL (XEXP (if_info
->a
, 1));
1029 target
= noce_emit_store_flag (if_info
,
1030 gen_reg_rtx (GET_MODE (if_info
->x
)),
1034 target
= expand_simple_binop (GET_MODE (if_info
->x
),
1035 subtract
? MINUS
: PLUS
,
1036 if_info
->b
, target
, if_info
->x
,
1040 if (target
!= if_info
->x
)
1041 noce_emit_move_insn (if_info
->x
, target
);
1043 seq
= end_ifcvt_sequence (if_info
);
1047 emit_insn_before_setloc (seq
, if_info
->jump
,
1048 INSN_LOCATOR (if_info
->insn_a
));
1058 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
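/* Sketch (generic names): when the test fails, -(test == 0) is all ones
   and the AND leaves X unchanged; when the test holds, the mask is zero
   and X is cleared:

       if (test) x = 0;
   =>
       x &= -(test == 0);  */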
1061 noce_try_store_flag_mask (struct noce_if_info
*if_info
)
1067 if (! no_new_pseudos
1068 && (BRANCH_COST
>= 2
1069 || STORE_FLAG_VALUE
== -1)
1070 && ((if_info
->a
== const0_rtx
1071 && rtx_equal_p (if_info
->b
, if_info
->x
))
1072 || ((reversep
= (reversed_comparison_code (if_info
->cond
,
1075 && if_info
->b
== const0_rtx
1076 && rtx_equal_p (if_info
->a
, if_info
->x
))))
1079 target
= noce_emit_store_flag (if_info
,
1080 gen_reg_rtx (GET_MODE (if_info
->x
)),
1083 target
= expand_simple_binop (GET_MODE (if_info
->x
), AND
,
1085 target
, if_info
->x
, 0,
1090 if (target
!= if_info
->x
)
1091 noce_emit_move_insn (if_info
->x
, target
);
1093 seq
= end_ifcvt_sequence (if_info
);
1097 emit_insn_before_setloc (seq
, if_info
->jump
,
1098 INSN_LOCATOR (if_info
->insn_a
));
1108 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
1111 noce_emit_cmove (struct noce_if_info
*if_info
, rtx x
, enum rtx_code code
,
1112 rtx cmp_a
, rtx cmp_b
, rtx vfalse
, rtx vtrue
)
1114 /* If earliest == jump, try to build the cmove insn directly.
1115 This is helpful when combine has created some complex condition
1116 (like for alpha's cmovlbs) that we can't hope to regenerate
1117 through the normal interface. */
1119 if (if_info
->cond_earliest
== if_info
->jump
)
1123 tmp
= gen_rtx_fmt_ee (code
, GET_MODE (if_info
->cond
), cmp_a
, cmp_b
);
1124 tmp
= gen_rtx_IF_THEN_ELSE (GET_MODE (x
), tmp
, vtrue
, vfalse
);
1125 tmp
= gen_rtx_SET (VOIDmode
, x
, tmp
);
1128 tmp
= emit_insn (tmp
);
1130 if (recog_memoized (tmp
) >= 0)
1142 /* Don't even try if the comparison operands are weird. */
1143 if (! general_operand (cmp_a
, GET_MODE (cmp_a
))
1144 || ! general_operand (cmp_b
, GET_MODE (cmp_b
)))
1147 #if HAVE_conditional_move
1148 return emit_conditional_move (x
, code
, cmp_a
, cmp_b
, VOIDmode
,
1149 vtrue
, vfalse
, GET_MODE (x
),
1150 (code
== LTU
|| code
== GEU
1151 || code
== LEU
|| code
== GTU
));
/* We'll never get here, as noce_process_if_block doesn't call the
   functions involved.  Ifdef code, however, should be discouraged
   because it leads to typos in the code not selected.  In any case,
   emit_conditional_move won't exist either.  */
/* Try only simple constants and registers here.  More complex cases
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
   has had a go at it.  */
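/* Sketch of the target form (generic names): for register or constant
   arms the whole if-then-else collapses into a single conditional-move
   class instruction,

       if (a < b) x = p; else x = q;
   =>
       x = (a < b) ? p : q;

   with no store-flag arithmetic needed.  */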
1166 noce_try_cmove (struct noce_if_info
*if_info
)
1171 if ((CONSTANT_P (if_info
->a
) || register_operand (if_info
->a
, VOIDmode
))
1172 && (CONSTANT_P (if_info
->b
) || register_operand (if_info
->b
, VOIDmode
)))
1176 code
= GET_CODE (if_info
->cond
);
1177 target
= noce_emit_cmove (if_info
, if_info
->x
, code
,
1178 XEXP (if_info
->cond
, 0),
1179 XEXP (if_info
->cond
, 1),
1180 if_info
->a
, if_info
->b
);
1184 if (target
!= if_info
->x
)
1185 noce_emit_move_insn (if_info
->x
, target
);
1187 seq
= end_ifcvt_sequence (if_info
);
1191 emit_insn_before_setloc (seq
, if_info
->jump
,
1192 INSN_LOCATOR (if_info
->insn_a
));
/* Try more complex cases involving conditional_move.  */
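/* Sketch of the idea (generic names): for

       if (test) x = a + b; else x = c - d;

   compute both arms unconditionally into temporaries and then select
   the result with one conditional move,

       t1 = a + b;
       t2 = c - d;
       x  = test ? t1 : t2;

   which is only safe when neither arm can trap or has side effects.  */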
1208 noce_try_cmove_arith (struct noce_if_info
*if_info
)
1218 /* A conditional move from two memory sources is equivalent to a
1219 conditional on their addresses followed by a load. Don't do this
1220 early because it'll screw alias analysis. Note that we've
1221 already checked for no side effects. */
1222 if (! no_new_pseudos
&& cse_not_expected
1223 && MEM_P (a
) && MEM_P (b
)
1224 && BRANCH_COST
>= 5)
1228 x
= gen_reg_rtx (Pmode
);
1232 /* ??? We could handle this if we knew that a load from A or B could
1233 not fault. This is also true if we've already loaded
1234 from the address along the path from ENTRY. */
1235 else if (may_trap_p (a
) || may_trap_p (b
))
1238 /* if (test) x = a + b; else x = c - d;
1245 code
= GET_CODE (if_info
->cond
);
1246 insn_a
= if_info
->insn_a
;
1247 insn_b
= if_info
->insn_b
;
1249 /* Possibly rearrange operands to make things come out more natural. */
1250 if (reversed_comparison_code (if_info
->cond
, if_info
->jump
) != UNKNOWN
)
1253 if (rtx_equal_p (b
, x
))
1255 else if (general_operand (b
, GET_MODE (b
)))
1260 code
= reversed_comparison_code (if_info
->cond
, if_info
->jump
);
1261 tmp
= a
, a
= b
, b
= tmp
;
1262 tmp
= insn_a
, insn_a
= insn_b
, insn_b
= tmp
;
1268 /* If either operand is complex, load it into a register first.
1269 The best way to do this is to copy the original insn. In this
1270 way we preserve any clobbers etc that the insn may have had.
1271 This is of course not possible in the IS_MEM case. */
1272 if (! general_operand (a
, GET_MODE (a
)))
1277 goto end_seq_and_fail
;
1281 tmp
= gen_reg_rtx (GET_MODE (a
));
1282 tmp
= emit_insn (gen_rtx_SET (VOIDmode
, tmp
, a
));
1285 goto end_seq_and_fail
;
1288 a
= gen_reg_rtx (GET_MODE (a
));
1289 tmp
= copy_rtx (insn_a
);
1290 set
= single_set (tmp
);
1292 tmp
= emit_insn (PATTERN (tmp
));
1294 if (recog_memoized (tmp
) < 0)
1295 goto end_seq_and_fail
;
1297 if (! general_operand (b
, GET_MODE (b
)))
1302 goto end_seq_and_fail
;
1306 tmp
= gen_reg_rtx (GET_MODE (b
));
1307 tmp
= emit_insn (gen_rtx_SET (VOIDmode
,
1312 goto end_seq_and_fail
;
1315 b
= gen_reg_rtx (GET_MODE (b
));
1316 tmp
= copy_rtx (insn_b
);
1317 set
= single_set (tmp
);
1319 tmp
= emit_insn (PATTERN (tmp
));
1321 if (recog_memoized (tmp
) < 0)
1322 goto end_seq_and_fail
;
1325 target
= noce_emit_cmove (if_info
, x
, code
, XEXP (if_info
->cond
, 0),
1326 XEXP (if_info
->cond
, 1), a
, b
);
1329 goto end_seq_and_fail
;
/* If we're handling the memory case from above, emit the load now.  */
1334 tmp
= gen_rtx_MEM (GET_MODE (if_info
->x
), target
);
1336 /* Copy over flags as appropriate. */
1337 if (MEM_VOLATILE_P (if_info
->a
) || MEM_VOLATILE_P (if_info
->b
))
1338 MEM_VOLATILE_P (tmp
) = 1;
1339 if (MEM_IN_STRUCT_P (if_info
->a
) && MEM_IN_STRUCT_P (if_info
->b
))
1340 MEM_IN_STRUCT_P (tmp
) = 1;
1341 if (MEM_SCALAR_P (if_info
->a
) && MEM_SCALAR_P (if_info
->b
))
1342 MEM_SCALAR_P (tmp
) = 1;
1343 if (MEM_ALIAS_SET (if_info
->a
) == MEM_ALIAS_SET (if_info
->b
))
1344 set_mem_alias_set (tmp
, MEM_ALIAS_SET (if_info
->a
));
1346 MIN (MEM_ALIGN (if_info
->a
), MEM_ALIGN (if_info
->b
)));
1348 noce_emit_move_insn (if_info
->x
, tmp
);
1350 else if (target
!= x
)
1351 noce_emit_move_insn (x
, target
);
1353 tmp
= end_ifcvt_sequence (if_info
);
1357 emit_insn_before_setloc (tmp
, if_info
->jump
, INSN_LOCATOR (if_info
->insn_a
));
1365 /* For most cases, the simplified condition we found is the best
1366 choice, but this is not the case for the min/max/abs transforms.
1367 For these we wish to know that it is A or B in the condition. */
1370 noce_get_alt_condition (struct noce_if_info
*if_info
, rtx target
,
1373 rtx cond
, set
, insn
;
1376 /* If target is already mentioned in the known condition, return it. */
1377 if (reg_mentioned_p (target
, if_info
->cond
))
1379 *earliest
= if_info
->cond_earliest
;
1380 return if_info
->cond
;
1383 set
= pc_set (if_info
->jump
);
1384 cond
= XEXP (SET_SRC (set
), 0);
1386 = GET_CODE (XEXP (SET_SRC (set
), 2)) == LABEL_REF
1387 && XEXP (XEXP (SET_SRC (set
), 2), 0) == JUMP_LABEL (if_info
->jump
);
1389 /* If we're looking for a constant, try to make the conditional
1390 have that constant in it. There are two reasons why it may
1391 not have the constant we want:
1393 1. GCC may have needed to put the constant in a register, because
1394 the target can't compare directly against that constant. For
1395 this case, we look for a SET immediately before the comparison
1396 that puts a constant in that register.
1398 2. GCC may have canonicalized the conditional, for example
1399 replacing "if x < 4" with "if x <= 3". We can undo that (or
1400 make equivalent types of changes) to get the constants we need
1401 if they're off by one in the right direction. */
1403 if (GET_CODE (target
) == CONST_INT
)
1405 enum rtx_code code
= GET_CODE (if_info
->cond
);
1406 rtx op_a
= XEXP (if_info
->cond
, 0);
1407 rtx op_b
= XEXP (if_info
->cond
, 1);
1410 /* First, look to see if we put a constant in a register. */
1411 prev_insn
= PREV_INSN (if_info
->cond_earliest
);
1413 && INSN_P (prev_insn
)
1414 && GET_CODE (PATTERN (prev_insn
)) == SET
)
1416 rtx src
= find_reg_equal_equiv_note (prev_insn
);
1418 src
= SET_SRC (PATTERN (prev_insn
));
1419 if (GET_CODE (src
) == CONST_INT
)
1421 if (rtx_equal_p (op_a
, SET_DEST (PATTERN (prev_insn
))))
1423 else if (rtx_equal_p (op_b
, SET_DEST (PATTERN (prev_insn
))))
1426 if (GET_CODE (op_a
) == CONST_INT
)
1431 code
= swap_condition (code
);
1436 /* Now, look to see if we can get the right constant by
1437 adjusting the conditional. */
1438 if (GET_CODE (op_b
) == CONST_INT
)
1440 HOST_WIDE_INT desired_val
= INTVAL (target
);
1441 HOST_WIDE_INT actual_val
= INTVAL (op_b
);
1446 if (actual_val
== desired_val
+ 1)
1449 op_b
= GEN_INT (desired_val
);
1453 if (actual_val
== desired_val
- 1)
1456 op_b
= GEN_INT (desired_val
);
1460 if (actual_val
== desired_val
- 1)
1463 op_b
= GEN_INT (desired_val
);
1467 if (actual_val
== desired_val
+ 1)
1470 op_b
= GEN_INT (desired_val
);
1478 /* If we made any changes, generate a new conditional that is
1479 equivalent to what we started with, but has the right
1481 if (code
!= GET_CODE (if_info
->cond
)
1482 || op_a
!= XEXP (if_info
->cond
, 0)
1483 || op_b
!= XEXP (if_info
->cond
, 1))
1485 cond
= gen_rtx_fmt_ee (code
, GET_MODE (cond
), op_a
, op_b
);
1486 *earliest
= if_info
->cond_earliest
;
1491 cond
= canonicalize_condition (if_info
->jump
, cond
, reverse
,
1492 earliest
, target
, false, true);
1493 if (! cond
|| ! reg_mentioned_p (target
, cond
))
1496 /* We almost certainly searched back to a different place.
1497 Need to re-verify correct lifetimes. */
1499 /* X may not be mentioned in the range (cond_earliest, jump]. */
1500 for (insn
= if_info
->jump
; insn
!= *earliest
; insn
= PREV_INSN (insn
))
1501 if (INSN_P (insn
) && reg_overlap_mentioned_p (if_info
->x
, PATTERN (insn
)))
1504 /* A and B may not be modified in the range [cond_earliest, jump). */
1505 for (insn
= *earliest
; insn
!= if_info
->jump
; insn
= NEXT_INSN (insn
))
1507 && (modified_in_p (if_info
->a
, insn
)
1508 || modified_in_p (if_info
->b
, insn
)))
1514 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
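/* Sketch (generic names; the signedness of the comparison chooses
   between the signed and unsigned min/max operations):

       if (a < b) x = a; else x = b;    =>   x = MIN (a, b);
       if (a > b) x = a; else x = b;    =>   x = MAX (a, b);  */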
1517 noce_try_minmax (struct noce_if_info
*if_info
)
1519 rtx cond
, earliest
, target
, seq
;
1520 enum rtx_code code
, op
;
1523 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1527 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1528 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1529 to get the target to tell us... */
1530 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info
->x
))
1531 || HONOR_NANS (GET_MODE (if_info
->x
)))
1534 cond
= noce_get_alt_condition (if_info
, if_info
->a
, &earliest
);
1538 /* Verify the condition is of the form we expect, and canonicalize
1539 the comparison code. */
1540 code
= GET_CODE (cond
);
1541 if (rtx_equal_p (XEXP (cond
, 0), if_info
->a
))
1543 if (! rtx_equal_p (XEXP (cond
, 1), if_info
->b
))
1546 else if (rtx_equal_p (XEXP (cond
, 1), if_info
->a
))
1548 if (! rtx_equal_p (XEXP (cond
, 0), if_info
->b
))
1550 code
= swap_condition (code
);
1555 /* Determine what sort of operation this is. Note that the code is for
1556 a taken branch, so the code->operation mapping appears backwards. */
1589 target
= expand_simple_binop (GET_MODE (if_info
->x
), op
,
1590 if_info
->a
, if_info
->b
,
1591 if_info
->x
, unsignedp
, OPTAB_WIDEN
);
1597 if (target
!= if_info
->x
)
1598 noce_emit_move_insn (if_info
->x
, target
);
1600 seq
= end_ifcvt_sequence (if_info
);
1604 emit_insn_before_setloc (seq
, if_info
->jump
, INSN_LOCATOR (if_info
->insn_a
));
1605 if_info
->cond
= cond
;
1606 if_info
->cond_earliest
= earliest
;
1611 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
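/* Sketch (generic names):

       if (a < 0) x = -a; else x = a;   =>   x = abs (a);
       if (a < 0) x = a; else x = -a;   =>   x = -abs (a);

   i.e. the absolute value, possibly negated, replaces the branch.  */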
1614 noce_try_abs (struct noce_if_info
*if_info
)
1616 rtx cond
, earliest
, target
, seq
, a
, b
, c
;
1619 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1623 /* Recognize A and B as constituting an ABS or NABS. */
1626 if (GET_CODE (a
) == NEG
&& rtx_equal_p (XEXP (a
, 0), b
))
1628 else if (GET_CODE (b
) == NEG
&& rtx_equal_p (XEXP (b
, 0), a
))
1630 c
= a
; a
= b
; b
= c
;
1636 cond
= noce_get_alt_condition (if_info
, b
, &earliest
);
1640 /* Verify the condition is of the form we expect. */
1641 if (rtx_equal_p (XEXP (cond
, 0), b
))
1643 else if (rtx_equal_p (XEXP (cond
, 1), b
))
1648 /* Verify that C is zero. Search backward through the block for
1649 a REG_EQUAL note if necessary. */
1652 rtx insn
, note
= NULL
;
1653 for (insn
= earliest
;
1654 insn
!= BB_HEAD (if_info
->test_bb
);
1655 insn
= PREV_INSN (insn
))
1657 && ((note
= find_reg_note (insn
, REG_EQUAL
, c
))
1658 || (note
= find_reg_note (insn
, REG_EQUIV
, c
))))
1665 && GET_CODE (XEXP (c
, 0)) == SYMBOL_REF
1666 && CONSTANT_POOL_ADDRESS_P (XEXP (c
, 0)))
1667 c
= get_pool_constant (XEXP (c
, 0));
1669 /* Work around funny ideas get_condition has wrt canonicalization.
1670 Note that these rtx constants are known to be CONST_INT, and
1671 therefore imply integer comparisons. */
1672 if (c
== constm1_rtx
&& GET_CODE (cond
) == GT
)
1674 else if (c
== const1_rtx
&& GET_CODE (cond
) == LT
)
1676 else if (c
!= CONST0_RTX (GET_MODE (b
)))
1679 /* Determine what sort of operation this is. */
1680 switch (GET_CODE (cond
))
1699 target
= expand_abs_nojump (GET_MODE (if_info
->x
), b
, if_info
->x
, 1);
1701 /* ??? It's a quandary whether cmove would be better here, especially
1702 for integers. Perhaps combine will clean things up. */
1703 if (target
&& negate
)
1704 target
= expand_simple_unop (GET_MODE (target
), NEG
, target
, if_info
->x
, 0);
1712 if (target
!= if_info
->x
)
1713 noce_emit_move_insn (if_info
->x
, target
);
1715 seq
= end_ifcvt_sequence (if_info
);
1719 emit_insn_before_setloc (seq
, if_info
->jump
, INSN_LOCATOR (if_info
->insn_a
));
1720 if_info
->cond
= cond
;
1721 if_info
->cond_earliest
= earliest
;
1726 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;". */
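/* Sketch (generic names, assuming a 32-bit M): the sign bit of M is
   smeared across the whole word and used as a mask,

       if (m < 0) x = b; else x = 0;
   =>
       x = (m >> 31) & b;

   where the shift count C is the mode bitsize minus one; as the comment
   below notes, the mask is actually materialized with emit_store_flag
   rather than a literal shift.  */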
1729 noce_try_sign_mask (struct noce_if_info
*if_info
)
1731 rtx cond
, t
, m
, c
, seq
;
1732 enum machine_mode mode
;
1738 cond
= if_info
->cond
;
1739 code
= GET_CODE (cond
);
1744 if (if_info
->a
== const0_rtx
)
1746 if ((code
== LT
&& c
== const0_rtx
)
1747 || (code
== LE
&& c
== constm1_rtx
))
1750 else if (if_info
->b
== const0_rtx
)
1752 if ((code
== GE
&& c
== const0_rtx
)
1753 || (code
== GT
&& c
== constm1_rtx
))
1757 if (! t
|| side_effects_p (t
))
1760 /* We currently don't handle different modes. */
1761 mode
= GET_MODE (t
);
1762 if (GET_MODE (m
) != mode
)
1765 /* This is only profitable if T is cheap, or T is unconditionally
1766 executed/evaluated in the original insn sequence. */
1767 if (rtx_cost (t
, SET
) >= COSTS_N_INSNS (2)
1768 && (!if_info
->b_unconditional
1769 || t
!= if_info
->b
))
1773 /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
1774 "(signed) m >> 31" directly. This benefits targets with specialized
1775 insns to obtain the signmask, but still uses ashr_optab otherwise. */
1776 m
= emit_store_flag (gen_reg_rtx (mode
), LT
, m
, const0_rtx
, mode
, 0, -1);
1777 t
= m
? expand_binop (mode
, and_optab
, m
, t
, NULL_RTX
, 0, OPTAB_DIRECT
)
1786 noce_emit_move_insn (if_info
->x
, t
);
1788 seq
= end_ifcvt_sequence (if_info
);
1792 emit_insn_before_setloc (seq
, if_info
->jump
, INSN_LOCATOR (if_info
->insn_a
));
1797 /* Similar to get_condition, only the resulting condition must be
1798 valid at JUMP, instead of at EARLIEST. */
1801 noce_get_condition (rtx jump
, rtx
*earliest
)
1806 if (! any_condjump_p (jump
))
1809 set
= pc_set (jump
);
1811 /* If this branches to JUMP_LABEL when the condition is false,
1812 reverse the condition. */
1813 reverse
= (GET_CODE (XEXP (SET_SRC (set
), 2)) == LABEL_REF
1814 && XEXP (XEXP (SET_SRC (set
), 2), 0) == JUMP_LABEL (jump
));
1816 /* If the condition variable is a register and is MODE_INT, accept it. */
1818 cond
= XEXP (SET_SRC (set
), 0);
1819 tmp
= XEXP (cond
, 0);
1820 if (REG_P (tmp
) && GET_MODE_CLASS (GET_MODE (tmp
)) == MODE_INT
)
1825 cond
= gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond
)),
1826 GET_MODE (cond
), tmp
, XEXP (cond
, 1));
1830 /* Otherwise, fall back on canonicalize_condition to do the dirty
1831 work of manipulating MODE_CC values and COMPARE rtx codes. */
1832 return canonicalize_condition (jump
, cond
, reverse
, earliest
,
1833 NULL_RTX
, false, true);
1836 /* Return true if OP is ok for if-then-else processing. */
1839 noce_operand_ok (rtx op
)
1841 /* We special-case memories, so handle any of them with
1842 no address side effects. */
1844 return ! side_effects_p (XEXP (op
, 0));
1846 if (side_effects_p (op
))
1849 return ! may_trap_p (op
);
1852 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1853 without using conditional execution. Return TRUE if we were
1854 successful at converting the block. */
1857 noce_process_if_block (struct ce_if_block
* ce_info
)
1859 basic_block test_bb
= ce_info
->test_bb
; /* test block */
1860 basic_block then_bb
= ce_info
->then_bb
; /* THEN */
1861 basic_block else_bb
= ce_info
->else_bb
; /* ELSE or NULL */
1862 struct noce_if_info if_info
;
1865 rtx orig_x
, x
, a
, b
;
/* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

   The later patterns require jumps to be more expensive.

   ??? For future expansion, look for multiple X in such patterns.  */
1878 /* If test is comprised of && or || elements, don't handle it unless it is
1879 the special case of && elements without an ELSE block. */
1880 if (ce_info
->num_multiple_test_blocks
)
1882 if (else_bb
|| ! ce_info
->and_and_p
)
1885 ce_info
->test_bb
= test_bb
= ce_info
->last_test_bb
;
1886 ce_info
->num_multiple_test_blocks
= 0;
1887 ce_info
->num_and_and_blocks
= 0;
1888 ce_info
->num_or_or_blocks
= 0;
1891 /* If this is not a standard conditional jump, we can't parse it. */
1892 jump
= BB_END (test_bb
);
1893 cond
= noce_get_condition (jump
, &if_info
.cond_earliest
);
1897 /* If the conditional jump is more than just a conditional
1898 jump, then we can not do if-conversion on this block. */
1899 if (! onlyjump_p (jump
))
1902 /* We must be comparing objects whose modes imply the size. */
1903 if (GET_MODE (XEXP (cond
, 0)) == BLKmode
)
1906 /* Look for one of the potential sets. */
1907 insn_a
= first_active_insn (then_bb
);
1909 || insn_a
!= last_active_insn (then_bb
, FALSE
)
1910 || (set_a
= single_set (insn_a
)) == NULL_RTX
)
1913 x
= SET_DEST (set_a
);
1914 a
= SET_SRC (set_a
);
1916 /* Look for the other potential set. Make sure we've got equivalent
1918 /* ??? This is overconservative. Storing to two different mems is
1919 as easy as conditionally computing the address. Storing to a
1920 single mem merely requires a scratch memory to use as one of the
1921 destination addresses; often the memory immediately below the
1922 stack pointer is available for this. */
1926 insn_b
= first_active_insn (else_bb
);
1928 || insn_b
!= last_active_insn (else_bb
, FALSE
)
1929 || (set_b
= single_set (insn_b
)) == NULL_RTX
1930 || ! rtx_equal_p (x
, SET_DEST (set_b
)))
1935 insn_b
= prev_nonnote_insn (if_info
.cond_earliest
);
1936 /* We're going to be moving the evaluation of B down from above
1937 COND_EARLIEST to JUMP. Make sure the relevant data is still
1940 || !NONJUMP_INSN_P (insn_b
)
1941 || (set_b
= single_set (insn_b
)) == NULL_RTX
1942 || ! rtx_equal_p (x
, SET_DEST (set_b
))
1943 || reg_overlap_mentioned_p (x
, SET_SRC (set_b
))
1944 || modified_between_p (SET_SRC (set_b
),
1945 PREV_INSN (if_info
.cond_earliest
), jump
)
1946 /* Likewise with X. In particular this can happen when
1947 noce_get_condition looks farther back in the instruction
1948 stream than one might expect. */
1949 || reg_overlap_mentioned_p (x
, cond
)
1950 || reg_overlap_mentioned_p (x
, a
)
1951 || modified_between_p (x
, PREV_INSN (if_info
.cond_earliest
), jump
))
1952 insn_b
= set_b
= NULL_RTX
;
/* If x has side effects then only the if-then-else form is safe to
   convert.  But even in that case we would need to restore any notes
   (such as REG_INC) at the end.  That can be tricky if
   noce_emit_move_insn expands to more than one insn, so disable the
   optimization entirely for now if there are side effects.  */
1960 if (side_effects_p (x
))
1963 b
= (set_b
? SET_SRC (set_b
) : x
);
1965 /* Only operate on register destinations, and even then avoid extending
1966 the lifetime of hard registers on small register class machines. */
1969 || (SMALL_REGISTER_CLASSES
1970 && REGNO (x
) < FIRST_PSEUDO_REGISTER
))
1972 if (no_new_pseudos
|| GET_MODE (x
) == BLKmode
)
1974 x
= gen_reg_rtx (GET_MODE (GET_CODE (x
) == STRICT_LOW_PART
1975 ? XEXP (x
, 0) : x
));
1978 /* Don't operate on sources that may trap or are volatile. */
1979 if (! noce_operand_ok (a
) || ! noce_operand_ok (b
))
1982 /* Set up the info block for our subroutines. */
1983 if_info
.test_bb
= test_bb
;
1984 if_info
.cond
= cond
;
1985 if_info
.jump
= jump
;
1986 if_info
.insn_a
= insn_a
;
1987 if_info
.insn_b
= insn_b
;
1991 if_info
.b_unconditional
= else_bb
== 0;
1993 /* Try optimizations in some approximation of a useful order. */
1994 /* ??? Should first look to see if X is live incoming at all. If it
1995 isn't, we don't need anything but an unconditional set. */
1997 /* Look and see if A and B are really the same. Avoid creating silly
1998 cmove constructs that no one will fix up later. */
1999 if (rtx_equal_p (a
, b
))
2001 /* If we have an INSN_B, we don't have to create any new rtl. Just
2002 move the instruction that we already have. If we don't have an
2003 INSN_B, that means that A == X, and we've got a noop move. In
2004 that case don't do anything and let the code below delete INSN_A. */
2005 if (insn_b
&& else_bb
)
2009 if (else_bb
&& insn_b
== BB_END (else_bb
))
2010 BB_END (else_bb
) = PREV_INSN (insn_b
);
2011 reorder_insns (insn_b
, insn_b
, PREV_INSN (jump
));
2013 /* If there was a REG_EQUAL note, delete it since it may have been
2014 true due to this insn being after a jump. */
2015 if ((note
= find_reg_note (insn_b
, REG_EQUAL
, NULL_RTX
)) != 0)
2016 remove_note (insn_b
, note
);
2020 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
2021 x must be executed twice. */
2022 else if (insn_b
&& side_effects_p (orig_x
))
2029 /* Disallow the "if (...) x = a;" form (with an implicit "else x = x;")
2030 for most optimizations if writing to x may trap, i.e. it's a memory
2031 other than a static var or a stack slot. */
2034 && ! MEM_NOTRAP_P (orig_x
)
2035 && rtx_addr_can_trap_p (XEXP (orig_x
, 0)))
2037 if (HAVE_conditional_move
)
2039 if (noce_try_cmove (&if_info
))
2041 if (! HAVE_conditional_execution
2042 && noce_try_cmove_arith (&if_info
))
2048 if (noce_try_move (&if_info
))
2050 if (noce_try_store_flag (&if_info
))
2052 if (noce_try_minmax (&if_info
))
2054 if (noce_try_abs (&if_info
))
2056 if (HAVE_conditional_move
2057 && noce_try_cmove (&if_info
))
2059 if (! HAVE_conditional_execution
)
2061 if (noce_try_store_flag_constants (&if_info
))
2063 if (noce_try_addcc (&if_info
))
2065 if (noce_try_store_flag_mask (&if_info
))
2067 if (HAVE_conditional_move
2068 && noce_try_cmove_arith (&if_info
))
2070 if (noce_try_sign_mask (&if_info
))
2077 /* The original sets may now be killed. */
2078 delete_insn (insn_a
);
/* Several special cases here: First, we may have reused insn_b above,
   in which case insn_b is now NULL.  Second, we want to delete insn_b
   if it came from the ELSE block, because it follows the now correct
   write that appears in the TEST block.  However, if we got insn_b from
   the TEST block, it may in fact be loading data needed for the
   comparison.  We'll let life_analysis remove the insn if it's really
   dead.  */
2086 if (insn_b
&& else_bb
)
2087 delete_insn (insn_b
);
2089 /* The new insns will have been inserted immediately before the jump. We
2090 should be able to remove the jump with impunity, but the condition itself
2091 may have been modified by gcse to be shared across basic blocks. */
2094 /* If we used a temporary, fix it up now. */
2098 noce_emit_move_insn (orig_x
, x
);
2099 insn_b
= get_insns ();
2100 set_used_flags (orig_x
);
2101 unshare_all_rtl_in_chain (insn_b
);
2104 emit_insn_after_setloc (insn_b
, BB_END (test_bb
), INSN_LOCATOR (insn_a
));
2107 /* Merge the blocks! */
2108 merge_if_block (ce_info
);
/* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
   straight line code.  Return true if successful.  */
2117 process_if_block (struct ce_if_block
* ce_info
)
2119 if (! reload_completed
2120 && noce_process_if_block (ce_info
))
2123 if (HAVE_conditional_execution
&& reload_completed
)
2125 /* If we have && and || tests, try to first handle combining the && and
2126 || tests into the conditional code, and if that fails, go back and
2127 handle it without the && and ||, which at present handles the && case
2128 if there was no ELSE block. */
2129 if (cond_exec_process_if_block (ce_info
, TRUE
))
2132 if (ce_info
->num_multiple_test_blocks
)
2136 if (cond_exec_process_if_block (ce_info
, FALSE
))
2144 /* Merge the blocks and mark for local life update. */
2147 merge_if_block (struct ce_if_block
* ce_info
)
2149 basic_block test_bb
= ce_info
->test_bb
; /* last test block */
2150 basic_block then_bb
= ce_info
->then_bb
; /* THEN */
2151 basic_block else_bb
= ce_info
->else_bb
; /* ELSE or NULL */
2152 basic_block join_bb
= ce_info
->join_bb
; /* join block */
2153 basic_block combo_bb
;
2155 /* All block merging is done into the lower block numbers. */
2159 /* Merge any basic blocks to handle && and || subtests. Each of
2160 the blocks are on the fallthru path from the predecessor block. */
2161 if (ce_info
->num_multiple_test_blocks
> 0)
2163 basic_block bb
= test_bb
;
2164 basic_block last_test_bb
= ce_info
->last_test_bb
;
2165 basic_block fallthru
= block_fallthru (bb
);
2170 fallthru
= block_fallthru (bb
);
2171 merge_blocks (combo_bb
, bb
);
2174 while (bb
!= last_test_bb
);
2177 /* Merge TEST block into THEN block. Normally the THEN block won't have a
2178 label, but it might if there were || tests. That label's count should be
2179 zero, and it normally should be removed. */
2183 if (combo_bb
->global_live_at_end
)
2184 COPY_REG_SET (combo_bb
->global_live_at_end
,
2185 then_bb
->global_live_at_end
);
2186 merge_blocks (combo_bb
, then_bb
);
2190 /* The ELSE block, if it existed, had a label. That label count
2191 will almost always be zero, but odd things can happen when labels
2192 get their addresses taken. */
2195 merge_blocks (combo_bb
, else_bb
);
2199 /* If there was no join block reported, that means it was not adjacent
2200 to the others, and so we cannot merge them. */
2204 rtx last
= BB_END (combo_bb
);
2206 /* The outgoing edge for the current COMBO block should already
2207 be correct. Verify this. */
2208 if (combo_bb
->succ
== NULL_EDGE
)
2210 if (find_reg_note (last
, REG_NORETURN
, NULL
))
2212 else if (NONJUMP_INSN_P (last
)
2213 && GET_CODE (PATTERN (last
)) == TRAP_IF
2214 && TRAP_CONDITION (PATTERN (last
)) == const_true_rtx
)
2220 /* There should still be something at the end of the THEN or ELSE
2221 blocks taking us to our final destination. */
2222 else if (JUMP_P (last
))
2224 else if (combo_bb
->succ
->dest
== EXIT_BLOCK_PTR
2226 && SIBLING_CALL_P (last
))
2228 else if ((combo_bb
->succ
->flags
& EDGE_EH
)
2229 && can_throw_internal (last
))
2235 /* The JOIN block may have had quite a number of other predecessors too.
2236 Since we've already merged the TEST, THEN and ELSE blocks, we should
2237 have only one remaining edge from our if-then-else diamond. If there
2238 is more than one remaining edge, it must come from elsewhere. There
2239 may be zero incoming edges if the THEN block didn't actually join
2240 back up (as with a call to abort). */
2241 else if ((join_bb
->pred
== NULL
2242 || join_bb
->pred
->pred_next
== NULL
)
2243 && join_bb
!= EXIT_BLOCK_PTR
)
2245 /* We can merge the JOIN. */
2246 if (combo_bb
->global_live_at_end
)
2247 COPY_REG_SET (combo_bb
->global_live_at_end
,
2248 join_bb
->global_live_at_end
);
2250 merge_blocks (combo_bb
, join_bb
);
2255 /* We cannot merge the JOIN. */
2257 /* The outgoing edge for the current COMBO block should already
2258 be correct. Verify this. */
2259 if (combo_bb
->succ
->succ_next
!= NULL_EDGE
2260 || combo_bb
->succ
->dest
!= join_bb
)
2263 /* Remove the jump and cruft from the end of the COMBO block. */
2264 if (join_bb
!= EXIT_BLOCK_PTR
)
2265 tidy_fallthru_edge (combo_bb
->succ
);
2268 num_updated_if_blocks
++;
/* Find a block ending in a simple IF condition and try to transform it
   in some way.  When converting a multi-block condition, put the new code
   in the first such block and delete the rest.  Return a pointer to this
   first block if some transformation was done.  Return NULL otherwise.  */
2277 find_if_header (basic_block test_bb
, int pass
)
2279 ce_if_block_t ce_info
;
2283 /* The kind of block we're looking for has exactly two successors. */
2284 if ((then_edge
= test_bb
->succ
) == NULL_EDGE
2285 || (else_edge
= then_edge
->succ_next
) == NULL_EDGE
2286 || else_edge
->succ_next
!= NULL_EDGE
)
2289 /* Neither edge should be abnormal. */
2290 if ((then_edge
->flags
& EDGE_COMPLEX
)
2291 || (else_edge
->flags
& EDGE_COMPLEX
))
2294 /* Nor exit the loop. */
2295 if ((then_edge
->flags
& EDGE_LOOP_EXIT
)
2296 || (else_edge
->flags
& EDGE_LOOP_EXIT
))
2299 /* The THEN edge is canonically the one that falls through. */
2300 if (then_edge
->flags
& EDGE_FALLTHRU
)
2302 else if (else_edge
->flags
& EDGE_FALLTHRU
)
2305 else_edge
= then_edge
;
2309 /* Otherwise this must be a multiway branch of some sort. */
2312 memset (&ce_info
, '\0', sizeof (ce_info
));
2313 ce_info
.test_bb
= test_bb
;
2314 ce_info
.then_bb
= then_edge
->dest
;
2315 ce_info
.else_bb
= else_edge
->dest
;
2316 ce_info
.pass
= pass
;
2318 #ifdef IFCVT_INIT_EXTRA_FIELDS
2319 IFCVT_INIT_EXTRA_FIELDS (&ce_info
);
2322 if (find_if_block (&ce_info
))
2325 if (HAVE_trap
&& HAVE_conditional_trap
2326 && find_cond_trap (test_bb
, then_edge
, else_edge
))
2329 if (dom_computed
[CDI_POST_DOMINATORS
] >= DOM_NO_FAST_QUERY
2330 && (! HAVE_conditional_execution
|| reload_completed
))
2332 if (find_if_case_1 (test_bb
, then_edge
, else_edge
))
2334 if (find_if_case_2 (test_bb
, then_edge
, else_edge
))
2342 fprintf (dump_file
, "Conversion succeeded on pass %d.\n", pass
);
2343 return ce_info
.test_bb
;
/* Return true if a block has two edges, one of which falls through to the
   next block, and the other jumps to a specific block, so that we can tell
   if the block is part of an && test or an || test.  Returns either -1 or
   the number of non-note, non-jump, non-USE/CLOBBER insns in the block.  */
2352 block_jumps_and_fallthru_p (basic_block cur_bb
, basic_block target_bb
)
2355 int fallthru_p
= FALSE
;
2361 if (!cur_bb
|| !target_bb
)
2364 /* If no edges, obviously it doesn't jump or fallthru. */
2365 if (cur_bb
->succ
== NULL_EDGE
)
2368 for (cur_edge
= cur_bb
->succ
;
2369 cur_edge
!= NULL_EDGE
;
2370 cur_edge
= cur_edge
->succ_next
)
2372 if (cur_edge
->flags
& EDGE_COMPLEX
)
2373 /* Anything complex isn't what we want. */
2376 else if (cur_edge
->flags
& EDGE_FALLTHRU
)
2379 else if (cur_edge
->dest
== target_bb
)
2386 if ((jump_p
& fallthru_p
) == 0)
2389 /* Don't allow calls in the block, since this is used to group && and ||
2390 together for conditional execution support. ??? we should support
2391 conditional execution support across calls for IA-64 some day, but
2392 for now it makes the code simpler. */
2393 end
= BB_END (cur_bb
);
2394 insn
= BB_HEAD (cur_bb
);
2396 while (insn
!= NULL_RTX
)
2403 && GET_CODE (PATTERN (insn
)) != USE
2404 && GET_CODE (PATTERN (insn
)) != CLOBBER
)
2410 insn
= NEXT_INSN (insn
);
/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
   block.  If so, we'll try to convert the insns to not require the branch.
   Return TRUE if we were successful at converting the block.  */
static int
find_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;
  basic_block then_bb = ce_info->then_bb;
  basic_block else_bb = ce_info->else_bb;
  basic_block join_bb = NULL_BLOCK;
  edge then_succ = then_bb->succ;
  edge else_succ = else_bb->succ;
  int then_predecessors;
  int else_predecessors;
  edge cur_edge;
  basic_block next;

  ce_info->last_test_bb = test_bb;

  /* Discover if any fall through predecessors of the current test basic block
     were && tests (which jump to the else block) or || tests (which jump to
     the then block).  */
  if (HAVE_conditional_execution && reload_completed
      && test_bb->pred != NULL_EDGE
      && test_bb->pred->pred_next == NULL_EDGE
      && test_bb->pred->flags == EDGE_FALLTHRU)
    {
      basic_block bb = test_bb->pred->src;
      basic_block target_bb;
      int max_insns = MAX_CONDITIONAL_EXECUTE;
      int n_insns;

      /* Determine if the preceding block is an && or || block.  */
      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
        {
          ce_info->and_and_p = TRUE;
          target_bb = else_bb;
        }
      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
        {
          ce_info->and_and_p = FALSE;
          target_bb = then_bb;
        }
      else
        target_bb = NULL_BLOCK;

      if (target_bb && n_insns <= max_insns)
        {
          int total_insns = 0;
          int blocks = 0;

          ce_info->last_test_bb = test_bb;

          /* Found at least one && or || block, look for more.  */
          do
            {
              ce_info->test_bb = test_bb = bb;
              total_insns += n_insns;
              blocks++;

              if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
                break;

              bb = bb->pred->src;
              n_insns = block_jumps_and_fallthru_p (bb, target_bb);
            }
          while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);

          ce_info->num_multiple_test_blocks = blocks;
          ce_info->num_multiple_test_insns = total_insns;

          if (ce_info->and_and_p)
            ce_info->num_and_and_blocks = blocks;
          else
            ce_info->num_or_or_blocks = blocks;
        }
    }
  /* Count the number of edges the THEN and ELSE blocks have.  */
  then_predecessors = 0;
  for (cur_edge = then_bb->pred;
       cur_edge != NULL_EDGE;
       cur_edge = cur_edge->pred_next)
    {
      then_predecessors++;
      if (cur_edge->flags & EDGE_COMPLEX)
        return FALSE;
    }

  else_predecessors = 0;
  for (cur_edge = else_bb->pred;
       cur_edge != NULL_EDGE;
       cur_edge = cur_edge->pred_next)
    {
      else_predecessors++;
      if (cur_edge->flags & EDGE_COMPLEX)
        return FALSE;
    }

  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
     other than any || blocks which jump to the THEN block.  */
  if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
    return FALSE;
  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (then_succ != NULL_EDGE
      && (then_succ->succ_next != NULL_EDGE
          || (then_succ->flags & EDGE_COMPLEX)
          || (flow2_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  ??? We should fix this in the future.  */
  if (then_succ == NULL)
    {
      if (else_bb->pred->pred_next == NULL_EDGE)
        {
          rtx last_insn = BB_END (then_bb);

          while (last_insn
                 && NOTE_P (last_insn)
                 && last_insn != BB_HEAD (then_bb))
            last_insn = PREV_INSN (last_insn);

          if (last_insn
              && JUMP_P (last_insn)
              && ! simplejump_p (last_insn))
            return FALSE;

          join_bb = else_bb;
          else_bb = NULL_BLOCK;
        }
      else
        return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (then_succ->dest == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (else_succ != NULL_EDGE
           && then_succ->dest == else_succ->dest
           && else_bb->pred->pred_next == NULL_EDGE
           && else_succ->succ_next == NULL_EDGE
           && ! (else_succ->flags & EDGE_COMPLEX)
           && ! (flow2_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
    join_bb = else_succ->dest;
  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
               "\nIF-THEN%s block found, pass %d, start block %d "
               "[insn %d], then %d [%d]",
               (else_bb) ? "-ELSE" : "",
               ce_info->pass,
               test_bb->index,
               BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
               then_bb->index,
               BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);

      if (else_bb)
        fprintf (dump_file, ", else %d [%d]",
                 else_bb->index,
                 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);

      fprintf (dump_file, ", join %d [%d]",
               join_bb->index,
               BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);

      if (ce_info->num_multiple_test_blocks > 0)
        fprintf (dump_file, ", %d %s block%s last test %d [%d]",
                 ce_info->num_multiple_test_blocks,
                 (ce_info->and_and_p) ? "&&" : "||",
                 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
                 ce_info->last_test_bb->index,
                 ((BB_HEAD (ce_info->last_test_bb))
                  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
                  : -1));

      fputc ('\n', dump_file);
    }
  /* Make sure the IF, THEN, and ELSE blocks are adjacent.  Actually, we get
     the first condition for free, since we've already asserted that there's a
     fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
     we checked the FALLTHRU flag, those are already adjacent to the last IF
     block.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than aborting the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next = then_bb;
  if (else_bb && (next = next->next_bb) != else_bb)
    return FALSE;
  if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
    {
      if (else_bb)
        join_bb = NULL;
      else
        return FALSE;
    }

  /* Do the real work.  */
  ce_info->else_bb = else_bb;
  ce_info->join_bb = join_bb;

  return process_if_block (ce_info);
}
/* Convert a branch over a trap, or a branch
   to a trap, into a conditional trap.  */
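
/* Illustration only, not taken from the original sources: the shape being
   rewritten.  A guard such as

	if (p == NULL)
	  __builtin_trap ();
	... use *p ...

   is emitted as a conditional branch around (or into) a block whose only
   active insn is an unconditional trap_if.  When the target defines a
   conditional trap pattern, the branch plus the trap block can be replaced
   by a single conditional trap emitted before the comparison, and the
   now-unreachable trap block deleted.  */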
static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;
  rtx trap, jump, cond, cond_earliest, seq;
  enum rtx_code code;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (dump_file)
    fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
             test_bb->index, trap_bb->index);

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &cond_earliest);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we cannot do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
        return FALSE;
    }

  /* Attempt to generate the conditional trap.  */
  seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
                       TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  num_true_changes++;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));

  /* Delete the trap block if possible.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  if (trap_bb->pred == NULL)
    delete_basic_block (trap_bb);

  /* If the non-trap block and the test are now adjacent, merge them.
     Otherwise we must insert a direct branch.  */
  if (test_bb->next_bb == other_bb)
    {
      struct ce_if_block new_ce_info;
      delete_insn (jump);
      memset (&new_ce_info, '\0', sizeof (new_ce_info));
      new_ce_info.test_bb = test_bb;
      new_ce_info.then_bb = NULL;
      new_ce_info.else_bb = NULL;
      new_ce_info.join_bb = other_bb;
      merge_if_block (&new_ce_info);
    }
  else
    {
      rtx lab, newjump;

      lab = JUMP_LABEL (jump);
      newjump = emit_jump_insn_after (gen_jump (lab), jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);

      delete_insn (jump);
    }

  return TRUE;
}
/* Subroutine of find_cond_trap: if BB contains only a trap insn,
   return it.  Otherwise return NULL_RTX.  */

static rtx
block_has_only_trap (basic_block bb)
{
  rtx trap;

  /* We're not the exit block.  */
  if (bb == EXIT_BLOCK_PTR)
    return NULL_RTX;

  /* The block must have no successors.  */
  if (bb->succ)
    return NULL_RTX;

  /* The only instruction in the THEN block must be the trap.  */
  trap = first_active_insn (bb);
  if (! (trap == BB_END (bb)
         && GET_CODE (PATTERN (trap)) == TRAP_IF
         && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
    return NULL_RTX;

  return trap;
}
/* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
   transformable, but not necessarily the other.  There need be no
   JOIN block.

   Return TRUE if we were successful at converting the block.

   Cases we'd like to look at:

   (1)
	if (test) goto over; // x not live
	x = a;
	goto label;
	over:

   becomes

	x = a;
	if (! test) goto label;

   (2)
	if (test) goto E; // x not live
	x = big();
	goto L;
	E:
	x = b;
	goto M;

   becomes

	x = b;
	if (test) goto M;
	x = big();
	goto L;

   (3) // This one's really only interesting for targets that can do
       // multiway branching, e.g. IA-64 BBB bundles.  For other targets
       // it results in multiple branches on a cache line, which often
       // does not sit well with predictors.

	if (test1) goto E; // predicted not taken
	x = a;
	if (test2) goto F;
	...
	E:
	x = b;
	J:

   becomes

	x = a;
	if (test1) goto E;
	if (test2) goto F;

   Notes:

   (A) Don't do (2) if the branch is predicted against the block we're
   eliminating.  Do it anyway if we can eliminate a branch; this requires
   that the sole successor of the eliminated block postdominate the other
   side of the if.

   (B) With CE, on (3) we can steal from both sides of the if, creating

	if (test1) x = a;
	if (!test1) x = b;
	if (test1) goto J;
	if (test2) goto F;
	...
	J:

   Again, this is most useful if J postdominates.

   (C) CE substitutes for helpful life information.

   (D) These heuristics need a lot of work.  */
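
/* Illustration only, not taken from the original sources: case (1) at the
   CFG level.  Before the transformation the blocks look like

	TEST:  if (test) goto OVER;
	THEN:  x = a;  goto LABEL;
	OVER:  ...

   find_if_case_1 asks dead_or_predicable to hoist THEN's insns above the
   branch and invert it to target LABEL, then redirects TEST's fallthru
   edge to OVER and deletes the THEN block, leaving

	TEST:  x = a;  if (! test) goto LABEL;
	OVER:  ...

   which is the "becomes" form of case (1) above.  */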
/* Tests for case 1 above.  */

static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest, new_bb;
  edge then_succ = then_bb->succ;
  int then_bb_index, bb_cost;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.  */

  if (flag_reorder_blocks_and_partition
      && ((BB_END (then_bb)
           && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
          || (BB_END (else_bb)
              && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
                                NULL_RTX))))
    return FALSE;

  /* THEN has one successor.  */
  if (!then_succ || then_succ->succ_next != NULL)
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (then_bb->pred->pred_next != NULL)
    return FALSE;

  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
             "\nIF-CASE-1 found, start %d, then %d\n",
             test_bb->index, then_bb->index);

  /* THEN is small.  */
  bb_cost = total_bb_rtx_cost (then_bb);
  if (bb_cost < 0 || bb_cost >= COSTS_N_INSNS (BRANCH_COST))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
                            then_bb->succ->dest, 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  bitmap_operation (test_bb->global_live_at_end,
                    else_bb->global_live_at_start,
                    then_bb->global_live_at_end, BITMAP_IOR);

  new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
  then_bb_index = then_bb->index;
  delete_basic_block (then_bb);

  /* Make rest of code believe that the newly created block is the THEN_BB
     block we removed.  */
  if (new_bb)
    {
      new_bb->index = then_bb_index;
      BASIC_BLOCK (then_bb_index) = new_bb;
      new_bb->partition = test_bb->partition;
    }
  /* We've possibly created a jump to the next insn; cleanup_cfg will solve
     that later.  */

  num_true_changes++;
  num_updated_if_blocks++;

  return TRUE;
}
/* Test for case 2 above.  */

static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ = else_bb->succ;
  int bb_cost;
  rtx note;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.  */

  if (flag_reorder_blocks_and_partition
      && ((BB_END (then_bb)
           && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
          || (BB_END (else_bb)
              && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
                                NULL_RTX))))
    return FALSE;

  /* ELSE has one successor.  */
  if (!else_succ || else_succ->succ_next != NULL)
    return FALSE;

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (else_bb->pred->pred_next != NULL)
    return FALSE;

  /* THEN is not EXIT.  */
  if (then_bb->index < 0)
    return FALSE;

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
    ;
  else if (else_succ->dest->index < 0
           || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
                              else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
             "\nIF-CASE-2 found, start %d, else %d\n",
             test_bb->index, else_bb->index);

  /* ELSE is small.  */
  bb_cost = total_bb_rtx_cost (else_bb);
  if (bb_cost < 0 || bb_cost >= COSTS_N_INSNS (BRANCH_COST))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  bitmap_operation (test_bb->global_live_at_end,
                    then_bb->global_live_at_start,
                    else_bb->global_live_at_end, BITMAP_IOR);

  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}
/* A subroutine of dead_or_predicable called through for_each_rtx.
   Return 1 if a memory is found.  */

static int
find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
{
  return MEM_P (*px);
}
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  NEW_DEST is the
   label TEST_BB should be branching to after the conversion.
   REVERSEP is true if the sense of the branch should be reversed.  */
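
/* Illustration only, not taken from the original sources.  There are two
   ways this routine can succeed.  With conditional execution, each insn
   of MERGE_BB is predicated on (a possibly reversed copy of) the branch
   condition, so no liveness information is needed at all.  Without it,
   MERGE_BB's insns are hoisted above the branch, e.g.

	if (test) goto over;		x = a + b;
	x = a + b;		=>	if (test) goto over;
	over:				over:

   which is only valid when "x" is dead at "over" and neither "x" nor
   anything the hoisted insns read is touched by the comparison sequence
   feeding the branch; the regset computations below establish exactly
   that.  */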
static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
                    basic_block other_bb, basic_block new_dest, int reversep)
{
  rtx head, end, jump, earliest = NULL_RTX, old_dest, new_label = NULL_RTX;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  if (LABEL_P (head))
    head = NEXT_INSN (head);
  if (NOTE_P (head))
    {
      if (head == end)
        {
          head = end = NULL_RTX;
          goto no_body;
        }
      head = NEXT_INSN (head);
    }

  if (JUMP_P (end))
    {
      if (head == end)
        {
          head = end = NULL_RTX;
          goto no_body;
        }
      end = PREV_INSN (end);
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (HAVE_conditional_execution)
    {
      /* In the conditional execution case, we have things easy.  We know
         the condition is reversible.  We don't have to check life info
         because we're going to conditionally execute the code anyway.
         All that's left is making sure the insns involved can actually
         be predicated.  */

      rtx cond, prob_val;

      cond = cond_exec_get_condition (jump);
      if (! cond)
        return FALSE;

      prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      if (prob_val)
        prob_val = XEXP (prob_val, 0);

      if (reversep)
        {
          enum rtx_code rev = reversed_comparison_code (cond, jump);
          if (rev == UNKNOWN)
            return FALSE;
          cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
                                 XEXP (cond, 1));
          if (prob_val)
            prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
        }

      if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
                                     prob_val, 0))
        goto cancel;

      earliest = jump;
    }
  else
#endif
    {
      /* In the non-conditional execution case, we have to verify that there
         are no trapping operations, no calls, no references to memory, and
         that any registers modified are dead at the branch site.  */

      rtx insn, cond, prev;
      regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
      regset merge_set, tmp, test_live, test_set;
      struct propagate_block_info *pbi;
      int i, fail = 0;

      /* Check for no calls or trapping operations.  */
      for (insn = head; ; insn = NEXT_INSN (insn))
        {
          if (CALL_P (insn))
            return FALSE;
          if (INSN_P (insn))
            {
              if (may_trap_p (PATTERN (insn)))
                return FALSE;

              /* ??? Even non-trapping memories such as stack frame
                 references must be avoided.  For stores, we collect
                 no lifetime info; for reads, we'd have to assert
                 true_dependence false against every store in the
                 TEST range.  */
              if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
                return FALSE;
            }
          if (insn == end)
            break;
        }

      if (! any_condjump_p (jump))
        return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest);
      if (! cond)
        return FALSE;
      /* Collect:
           MERGE_SET = set of registers set in MERGE_BB
           TEST_LIVE = set of registers live at EARLIEST
           TEST_SET  = set of registers set between EARLIEST and the
                       end of the block.  */
      tmp = INITIALIZE_REG_SET (tmp_head);
      merge_set = INITIALIZE_REG_SET (merge_set_head);
      test_live = INITIALIZE_REG_SET (test_live_head);
      test_set = INITIALIZE_REG_SET (test_set_head);

      /* ??? bb->local_set is only valid during calculate_global_regs_live,
         so we must recompute usage for MERGE_BB.  Not so bad, I suppose,
         since we've already asserted that MERGE_BB is small.  */
      propagate_block (merge_bb, tmp, merge_set, merge_set, 0);

      /* For small register class machines, don't lengthen lifetimes of
         hard registers before reload.  */
      if (SMALL_REGISTER_CLASSES && ! reload_completed)
        {
          EXECUTE_IF_SET_IN_BITMAP
            (merge_set, 0, i,
             {
               if (i < FIRST_PSEUDO_REGISTER
                   && ! fixed_regs[i]
                   && ! global_regs[i])
                 fail = 1;
             });
        }

      /* For TEST, we're interested in a range of insns, not a whole block.
         Moreover, we're interested in the insns live from OTHER_BB.  */

      COPY_REG_SET (test_live, other_bb->global_live_at_start);
      pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
                                       0);

      for (insn = jump; ; insn = prev)
        {
          prev = propagate_one_insn (pbi, insn);
          if (insn == earliest)
            break;
        }

      free_propagate_block_info (pbi);

      /* We can perform the transformation if
           MERGE_SET & (TEST_SET | TEST_LIVE)
         and
           TEST_SET & merge_bb->global_live_at_start
         are empty.  */
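
      /* Illustration only, not taken from the original sources: suppose
         MERGE_BB contains "r10 = r11 + r12", the comparison feeding the
         branch reads r13, and r10 is also read on the path that skips
         MERGE_BB.  Then MERGE_SET = {r10}, TEST_LIVE contains r10 (it is
         live into OTHER_BB) as well as r13, and TEST_SET holds whatever
         the comparison sequence itself sets (e.g. a flags register).
         MERGE_SET & TEST_LIVE is non-empty, so the hoist is rejected:
         moving the store to r10 above the branch would clobber a value the
         other path still needs.  If r10 were dead there, both
         intersections below would be empty and the transformation would
         proceed.  */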
      bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
      bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
      EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, fail = 1);

      bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
                        BITMAP_AND);
      EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, fail = 1);

      FREE_REG_SET (tmp);
      FREE_REG_SET (merge_set);
      FREE_REG_SET (test_live);
      FREE_REG_SET (test_set);

      if (fail)
        return FALSE;
    }
 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      new_label = block_label (new_dest);
      if (reversep
          ? ! invert_jump_1 (jump, new_label)
          : ! redirect_jump_1 (jump, new_label))
        goto cancel;
    }

  if (! apply_change_group ())
    return FALSE;

  if (other_bb != new_dest)
    {
      if (old_dest)
        LABEL_NUSES (old_dest) -= 1;
      if (new_label)
        LABEL_NUSES (new_label) += 1;
      JUMP_LABEL (jump) = new_label;
      if (reversep)
        invert_br_probabilities (jump);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
        {
          gcov_type count, probability;
          count = BRANCH_EDGE (test_bb)->count;
          BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
          FALLTHRU_EDGE (test_bb)->count = count;
          probability = BRANCH_EDGE (test_bb)->probability;
          BRANCH_EDGE (test_bb)->probability
            = FALLTHRU_EDGE (test_bb)->probability;
          FALLTHRU_EDGE (test_bb)->probability = probability;
          update_br_prob_note (test_bb);
        }
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL_RTX)
    {
      if (end == BB_END (merge_bb))
        BB_END (merge_bb) = PREV_INSN (head);

      if (squeeze_notes (&head, &end))
        return TRUE;

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
         At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  cancel_changes (0);
  return FALSE;
}
/* Main entry point for all if-conversion.  */
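
/* Illustration only, not taken from the original sources: the overall
   shape of the driver below.

	do
	  {
	    for each basic block BB
	      while (find_if_header (BB, pass) returns a new start block)
		continue converting from that block;
	  }
	while (a conditional-execution conversion was made this pass);

   Re-walking the CFG after a successful conditional-execution conversion
   lets nested IF-THEN{-ELSE} regions, which only become simple once an
   inner region has been collapsed, be picked up on a later pass.  */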
void
if_convert (int x_life_data_ok)
{
  basic_block bb;
  int pass;

  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;
  life_data_ok = (x_life_data_ok != 0);

  if ((! targetm.cannot_modify_jumps_p ())
      && (!flag_reorder_blocks_and_partition || !no_new_pseudos
          || !targetm.have_named_sections))
    mark_loop_exit_edges ();

  /* Compute postdominators if we think we'll use them.  */
  if (HAVE_conditional_execution || life_data_ok)
    calculate_dominance_info (CDI_POST_DOMINATORS);

  if (life_data_ok)
    clear_bb_flags ();

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && pass > 1)
        fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB (bb)
        {
          basic_block new_bb;
          while ((new_bb = find_if_header (bb, pass)))
            bb = new_bb;
        }

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && cond_exec_changed_p)
        print_rtl_with_bb (dump_file, get_insns ());
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  clear_aux_for_blocks ();

  /* Rebuild life info for basic blocks that require it.  */
  if (num_true_changes && life_data_ok)
    {
      /* If we allocated new pseudos, we must resize the array for sched1.  */
      if (max_regno < max_reg_num ())
        {
          max_regno = max_reg_num ();
          allocate_reg_info (max_regno, FALSE, FALSE);
        }
      update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
                                        PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
                                        | PROP_KILL_DEAD_CODE);
    }

  /* Write the final stats.  */
  if (dump_file && num_possible_if_blocks > 0)
    {
      fprintf (dump_file,
               "\n%d possible IF blocks searched.\n",
               num_possible_if_blocks);
      fprintf (dump_file,
               "%d IF blocks converted.\n",
               num_updated_if_blocks);
      fprintf (dump_file,
               "%d true changes made.\n\n\n",
               num_true_changes);
    }

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}