1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "insn-config.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
44 #include "tree-pass.h"
/* Default fallbacks for instruction patterns the target may not provide.
   Each #ifndef restores a `#endif` dropped by text extraction.  */
#ifndef HAVE_conditional_execution
#define HAVE_conditional_execution 0
#endif
#ifndef HAVE_conditional_move
#define HAVE_conditional_move 0
#endif
#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif
#ifndef HAVE_trap
#define HAVE_trap 0
#endif

/* Maximum number of insns we are willing to convert to conditional
   execution: one more than the branch cost of the current function.  */
#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
   + 1)
#endif

#define IFCVT_MULTIPLE_DUMPS 1

#define NULL_BLOCK	((basic_block) NULL)
/* # of IF-THEN or IF-THEN-ELSE blocks we looked at  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;
89 /* Forward references. */
90 static int count_bb_insns (const_basic_block
);
91 static bool cheap_bb_rtx_cost_p (const_basic_block
, int);
92 static rtx
first_active_insn (basic_block
);
93 static rtx
last_active_insn (basic_block
, int);
94 static basic_block
block_fallthru (basic_block
);
95 static int cond_exec_process_insns (ce_if_block_t
*, rtx
, rtx
, rtx
, rtx
, int);
96 static rtx
cond_exec_get_condition (rtx
);
97 static rtx
noce_get_condition (rtx
, rtx
*, bool);
98 static int noce_operand_ok (const_rtx
);
99 static void merge_if_block (ce_if_block_t
*);
100 static int find_cond_trap (basic_block
, edge
, edge
);
101 static basic_block
find_if_header (basic_block
, int);
102 static int block_jumps_and_fallthru_p (basic_block
, basic_block
);
103 static int noce_find_if_block (basic_block
, edge
, edge
, int);
104 static int cond_exec_find_if_block (ce_if_block_t
*);
105 static int find_if_case_1 (basic_block
, edge
, edge
);
106 static int find_if_case_2 (basic_block
, edge
, edge
);
107 static int find_memory (rtx
*, void *);
108 static int dead_or_predicable (basic_block
, basic_block
, basic_block
,
110 static void noce_emit_move_insn (rtx
, rtx
);
111 static rtx
block_has_only_trap (basic_block
);
113 /* Count the number of non-jump active insns in BB. */
116 count_bb_insns (const_basic_block bb
)
119 rtx insn
= BB_HEAD (bb
);
123 if (CALL_P (insn
) || NONJUMP_INSN_P (insn
))
126 if (insn
== BB_END (bb
))
128 insn
= NEXT_INSN (insn
);
134 /* Determine whether the total insn_rtx_cost on non-jump insns in
135 basic block BB is less than MAX_COST. This function returns
136 false if the cost of any instruction could not be estimated. */
139 cheap_bb_rtx_cost_p (const_basic_block bb
, int max_cost
)
142 rtx insn
= BB_HEAD (bb
);
143 bool speed
= optimize_bb_for_speed_p (bb
);
147 if (NONJUMP_INSN_P (insn
))
149 int cost
= insn_rtx_cost (PATTERN (insn
), speed
);
153 /* If this instruction is the load or set of a "stack" register,
154 such as a floating point register on x87, then the cost of
155 speculatively executing this insn may need to include
156 the additional cost of popping its result off of the
157 register stack. Unfortunately, correctly recognizing and
158 accounting for this additional overhead is tricky, so for
159 now we simply prohibit such speculative execution. */
162 rtx set
= single_set (insn
);
163 if (set
&& STACK_REG_P (SET_DEST (set
)))
169 if (count
>= max_cost
)
172 else if (CALL_P (insn
))
175 if (insn
== BB_END (bb
))
177 insn
= NEXT_INSN (insn
);
183 /* Return the first non-jump active insn in the basic block. */
186 first_active_insn (basic_block bb
)
188 rtx insn
= BB_HEAD (bb
);
192 if (insn
== BB_END (bb
))
194 insn
= NEXT_INSN (insn
);
197 while (NOTE_P (insn
))
199 if (insn
== BB_END (bb
))
201 insn
= NEXT_INSN (insn
);
210 /* Return the last non-jump active (non-jump) insn in the basic block. */
213 last_active_insn (basic_block bb
, int skip_use_p
)
215 rtx insn
= BB_END (bb
);
216 rtx head
= BB_HEAD (bb
);
221 && NONJUMP_INSN_P (insn
)
222 && GET_CODE (PATTERN (insn
)) == USE
))
226 insn
= PREV_INSN (insn
);
235 /* Return the basic block reached by falling though the basic block BB. */
238 block_fallthru (basic_block bb
)
243 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
244 if (e
->flags
& EDGE_FALLTHRU
)
247 return (e
) ? e
->dest
: NULL_BLOCK
;
250 /* Go through a bunch of insns, converting them to conditional
251 execution format if possible. Return TRUE if all of the non-note
252 insns were processed. */
255 cond_exec_process_insns (ce_if_block_t
*ce_info ATTRIBUTE_UNUSED
,
256 /* if block information */rtx start
,
257 /* first insn to look at */rtx end
,
258 /* last insn to look at */rtx test
,
259 /* conditional execution test */rtx prob_val
,
260 /* probability of branch taken. */int mod_ok
)
262 int must_be_last
= FALSE
;
270 for (insn
= start
; ; insn
= NEXT_INSN (insn
))
275 gcc_assert(NONJUMP_INSN_P (insn
) || CALL_P (insn
));
277 /* Remove USE insns that get in the way. */
278 if (reload_completed
&& GET_CODE (PATTERN (insn
)) == USE
)
280 /* ??? Ug. Actually unlinking the thing is problematic,
281 given what we'd have to coordinate with our callers. */
282 SET_INSN_DELETED (insn
);
286 /* Last insn wasn't last? */
290 if (modified_in_p (test
, insn
))
297 /* Now build the conditional form of the instruction. */
298 pattern
= PATTERN (insn
);
299 xtest
= copy_rtx (test
);
301 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
303 if (GET_CODE (pattern
) == COND_EXEC
)
305 if (GET_MODE (xtest
) != GET_MODE (COND_EXEC_TEST (pattern
)))
308 xtest
= gen_rtx_AND (GET_MODE (xtest
), xtest
,
309 COND_EXEC_TEST (pattern
));
310 pattern
= COND_EXEC_CODE (pattern
);
313 pattern
= gen_rtx_COND_EXEC (VOIDmode
, xtest
, pattern
);
315 /* If the machine needs to modify the insn being conditionally executed,
316 say for example to force a constant integer operand into a temp
317 register, do so here. */
318 #ifdef IFCVT_MODIFY_INSN
319 IFCVT_MODIFY_INSN (ce_info
, pattern
, insn
);
324 validate_change (insn
, &PATTERN (insn
), pattern
, 1);
326 if (CALL_P (insn
) && prob_val
)
327 validate_change (insn
, ®_NOTES (insn
),
328 alloc_EXPR_LIST (REG_BR_PROB
, prob_val
,
329 REG_NOTES (insn
)), 1);
339 /* Return the condition for a jump. Do not do any special processing. */
342 cond_exec_get_condition (rtx jump
)
346 if (any_condjump_p (jump
))
347 test_if
= SET_SRC (pc_set (jump
));
350 cond
= XEXP (test_if
, 0);
352 /* If this branches to JUMP_LABEL when the condition is false,
353 reverse the condition. */
354 if (GET_CODE (XEXP (test_if
, 2)) == LABEL_REF
355 && XEXP (XEXP (test_if
, 2), 0) == JUMP_LABEL (jump
))
357 enum rtx_code rev
= reversed_comparison_code (cond
, jump
);
361 cond
= gen_rtx_fmt_ee (rev
, GET_MODE (cond
), XEXP (cond
, 0),
368 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
369 to conditional execution. Return TRUE if we were successful at
370 converting the block. */
373 cond_exec_process_if_block (ce_if_block_t
* ce_info
,
374 /* if block information */int do_multiple_p
)
376 basic_block test_bb
= ce_info
->test_bb
; /* last test block */
377 basic_block then_bb
= ce_info
->then_bb
; /* THEN */
378 basic_block else_bb
= ce_info
->else_bb
; /* ELSE or NULL */
379 rtx test_expr
; /* expression in IF_THEN_ELSE that is tested */
380 rtx then_start
; /* first insn in THEN block */
381 rtx then_end
; /* last insn + 1 in THEN block */
382 rtx else_start
= NULL_RTX
; /* first insn in ELSE block or NULL */
383 rtx else_end
= NULL_RTX
; /* last insn + 1 in ELSE block */
384 int max
; /* max # of insns to convert. */
385 int then_mod_ok
; /* whether conditional mods are ok in THEN */
386 rtx true_expr
; /* test for else block insns */
387 rtx false_expr
; /* test for then block insns */
388 rtx true_prob_val
; /* probability of else block */
389 rtx false_prob_val
; /* probability of then block */
391 enum rtx_code false_code
;
393 /* If test is comprised of && or || elements, and we've failed at handling
394 all of them together, just use the last test if it is the special case of
395 && elements without an ELSE block. */
396 if (!do_multiple_p
&& ce_info
->num_multiple_test_blocks
)
398 if (else_bb
|| ! ce_info
->and_and_p
)
401 ce_info
->test_bb
= test_bb
= ce_info
->last_test_bb
;
402 ce_info
->num_multiple_test_blocks
= 0;
403 ce_info
->num_and_and_blocks
= 0;
404 ce_info
->num_or_or_blocks
= 0;
407 /* Find the conditional jump to the ELSE or JOIN part, and isolate
409 test_expr
= cond_exec_get_condition (BB_END (test_bb
));
413 /* If the conditional jump is more than just a conditional jump,
414 then we can not do conditional execution conversion on this block. */
415 if (! onlyjump_p (BB_END (test_bb
)))
418 /* Collect the bounds of where we're to search, skipping any labels, jumps
419 and notes at the beginning and end of the block. Then count the total
420 number of insns and see if it is small enough to convert. */
421 then_start
= first_active_insn (then_bb
);
422 then_end
= last_active_insn (then_bb
, TRUE
);
423 n_insns
= ce_info
->num_then_insns
= count_bb_insns (then_bb
);
424 max
= MAX_CONDITIONAL_EXECUTE
;
429 else_start
= first_active_insn (else_bb
);
430 else_end
= last_active_insn (else_bb
, TRUE
);
431 n_insns
+= ce_info
->num_else_insns
= count_bb_insns (else_bb
);
437 /* Map test_expr/test_jump into the appropriate MD tests to use on
438 the conditionally executed code. */
440 true_expr
= test_expr
;
442 false_code
= reversed_comparison_code (true_expr
, BB_END (test_bb
));
443 if (false_code
!= UNKNOWN
)
444 false_expr
= gen_rtx_fmt_ee (false_code
, GET_MODE (true_expr
),
445 XEXP (true_expr
, 0), XEXP (true_expr
, 1));
447 false_expr
= NULL_RTX
;
449 #ifdef IFCVT_MODIFY_TESTS
450 /* If the machine description needs to modify the tests, such as setting a
451 conditional execution register from a comparison, it can do so here. */
452 IFCVT_MODIFY_TESTS (ce_info
, true_expr
, false_expr
);
454 /* See if the conversion failed. */
455 if (!true_expr
|| !false_expr
)
459 true_prob_val
= find_reg_note (BB_END (test_bb
), REG_BR_PROB
, NULL_RTX
);
462 true_prob_val
= XEXP (true_prob_val
, 0);
463 false_prob_val
= GEN_INT (REG_BR_PROB_BASE
- INTVAL (true_prob_val
));
466 false_prob_val
= NULL_RTX
;
468 /* If we have && or || tests, do them here. These tests are in the adjacent
469 blocks after the first block containing the test. */
470 if (ce_info
->num_multiple_test_blocks
> 0)
472 basic_block bb
= test_bb
;
473 basic_block last_test_bb
= ce_info
->last_test_bb
;
482 enum rtx_code f_code
;
484 bb
= block_fallthru (bb
);
485 start
= first_active_insn (bb
);
486 end
= last_active_insn (bb
, TRUE
);
488 && ! cond_exec_process_insns (ce_info
, start
, end
, false_expr
,
489 false_prob_val
, FALSE
))
492 /* If the conditional jump is more than just a conditional jump, then
493 we can not do conditional execution conversion on this block. */
494 if (! onlyjump_p (BB_END (bb
)))
497 /* Find the conditional jump and isolate the test. */
498 t
= cond_exec_get_condition (BB_END (bb
));
502 f_code
= reversed_comparison_code (t
, BB_END (bb
));
503 if (f_code
== UNKNOWN
)
506 f
= gen_rtx_fmt_ee (f_code
, GET_MODE (t
), XEXP (t
, 0), XEXP (t
, 1));
507 if (ce_info
->and_and_p
)
509 t
= gen_rtx_AND (GET_MODE (t
), true_expr
, t
);
510 f
= gen_rtx_IOR (GET_MODE (t
), false_expr
, f
);
514 t
= gen_rtx_IOR (GET_MODE (t
), true_expr
, t
);
515 f
= gen_rtx_AND (GET_MODE (t
), false_expr
, f
);
518 /* If the machine description needs to modify the tests, such as
519 setting a conditional execution register from a comparison, it can
521 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
522 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info
, bb
, t
, f
);
524 /* See if the conversion failed. */
532 while (bb
!= last_test_bb
);
535 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
536 on then THEN block. */
537 then_mod_ok
= (else_bb
== NULL_BLOCK
);
539 /* Go through the THEN and ELSE blocks converting the insns if possible
540 to conditional execution. */
544 || ! cond_exec_process_insns (ce_info
, then_start
, then_end
,
545 false_expr
, false_prob_val
,
549 if (else_bb
&& else_end
550 && ! cond_exec_process_insns (ce_info
, else_start
, else_end
,
551 true_expr
, true_prob_val
, TRUE
))
554 /* If we cannot apply the changes, fail. Do not go through the normal fail
555 processing, since apply_change_group will call cancel_changes. */
556 if (! apply_change_group ())
558 #ifdef IFCVT_MODIFY_CANCEL
559 /* Cancel any machine dependent changes. */
560 IFCVT_MODIFY_CANCEL (ce_info
);
565 #ifdef IFCVT_MODIFY_FINAL
566 /* Do any machine dependent final modifications. */
567 IFCVT_MODIFY_FINAL (ce_info
);
570 /* Conversion succeeded. */
572 fprintf (dump_file
, "%d insn%s converted to conditional execution.\n",
573 n_insns
, (n_insns
== 1) ? " was" : "s were");
575 /* Merge the blocks! */
576 merge_if_block (ce_info
);
577 cond_exec_changed_p
= TRUE
;
581 #ifdef IFCVT_MODIFY_CANCEL
582 /* Cancel any machine dependent changes. */
583 IFCVT_MODIFY_CANCEL (ce_info
);
590 /* Used by noce_process_if_block to communicate with its subroutines.
592 The subroutines know that A and B may be evaluated freely. They
593 know that X is a register. They should insert new instructions
594 before cond_earliest. */
598 /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block. */
599 basic_block test_bb
, then_bb
, else_bb
, join_bb
;
601 /* The jump that ends TEST_BB. */
604 /* The jump condition. */
607 /* New insns should be inserted before this one. */
610 /* Insns in the THEN and ELSE block. There is always just this
611 one insns in those blocks. The insns are single_set insns.
612 If there was no ELSE block, INSN_B is the last insn before
613 COND_EARLIEST, or NULL_RTX. In the former case, the insn
614 operands are still valid, as if INSN_B was moved down below
618 /* The SET_SRC of INSN_A and INSN_B. */
621 /* The SET_DEST of INSN_A. */
624 /* True if this if block is not canonical. In the canonical form of
625 if blocks, the THEN_BB is the block reached via the fallthru edge
626 from TEST_BB. For the noce transformations, we allow the symmetric
628 bool then_else_reversed
;
630 /* Estimated cost of the particular branch instruction. */
634 static rtx
noce_emit_store_flag (struct noce_if_info
*, rtx
, int, int);
635 static int noce_try_move (struct noce_if_info
*);
636 static int noce_try_store_flag (struct noce_if_info
*);
637 static int noce_try_addcc (struct noce_if_info
*);
638 static int noce_try_store_flag_constants (struct noce_if_info
*);
639 static int noce_try_store_flag_mask (struct noce_if_info
*);
640 static rtx
noce_emit_cmove (struct noce_if_info
*, rtx
, enum rtx_code
, rtx
,
642 static int noce_try_cmove (struct noce_if_info
*);
643 static int noce_try_cmove_arith (struct noce_if_info
*);
644 static rtx
noce_get_alt_condition (struct noce_if_info
*, rtx
, rtx
*);
645 static int noce_try_minmax (struct noce_if_info
*);
646 static int noce_try_abs (struct noce_if_info
*);
647 static int noce_try_sign_mask (struct noce_if_info
*);
649 /* Helper function for noce_try_store_flag*. */
652 noce_emit_store_flag (struct noce_if_info
*if_info
, rtx x
, int reversep
,
655 rtx cond
= if_info
->cond
;
659 cond_complex
= (! general_operand (XEXP (cond
, 0), VOIDmode
)
660 || ! general_operand (XEXP (cond
, 1), VOIDmode
));
662 /* If earliest == jump, or when the condition is complex, try to
663 build the store_flag insn directly. */
667 rtx set
= pc_set (if_info
->jump
);
668 cond
= XEXP (SET_SRC (set
), 0);
669 if (GET_CODE (XEXP (SET_SRC (set
), 2)) == LABEL_REF
670 && XEXP (XEXP (SET_SRC (set
), 2), 0) == JUMP_LABEL (if_info
->jump
))
671 reversep
= !reversep
;
672 if (if_info
->then_else_reversed
)
673 reversep
= !reversep
;
677 code
= reversed_comparison_code (cond
, if_info
->jump
);
679 code
= GET_CODE (cond
);
681 if ((if_info
->cond_earliest
== if_info
->jump
|| cond_complex
)
682 && (normalize
== 0 || STORE_FLAG_VALUE
== normalize
))
686 tmp
= gen_rtx_fmt_ee (code
, GET_MODE (x
), XEXP (cond
, 0),
688 tmp
= gen_rtx_SET (VOIDmode
, x
, tmp
);
691 tmp
= emit_insn (tmp
);
693 if (recog_memoized (tmp
) >= 0)
699 if_info
->cond_earliest
= if_info
->jump
;
707 /* Don't even try if the comparison operands or the mode of X are weird. */
708 if (cond_complex
|| !SCALAR_INT_MODE_P (GET_MODE (x
)))
711 return emit_store_flag (x
, code
, XEXP (cond
, 0),
712 XEXP (cond
, 1), VOIDmode
,
713 (code
== LTU
|| code
== LEU
714 || code
== GEU
|| code
== GTU
), normalize
);
717 /* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
718 X is the destination/target and Y is the value to copy. */
721 noce_emit_move_insn (rtx x
, rtx y
)
723 enum machine_mode outmode
;
727 if (GET_CODE (x
) != STRICT_LOW_PART
)
729 rtx seq
, insn
, target
;
733 /* Check that the SET_SRC is reasonable before calling emit_move_insn,
734 otherwise construct a suitable SET pattern ourselves. */
735 insn
= (OBJECT_P (y
) || CONSTANT_P (y
) || GET_CODE (y
) == SUBREG
)
736 ? emit_move_insn (x
, y
)
737 : emit_insn (gen_rtx_SET (VOIDmode
, x
, y
));
741 if (recog_memoized (insn
) <= 0)
743 if (GET_CODE (x
) == ZERO_EXTRACT
)
745 rtx op
= XEXP (x
, 0);
746 unsigned HOST_WIDE_INT size
= INTVAL (XEXP (x
, 1));
747 unsigned HOST_WIDE_INT start
= INTVAL (XEXP (x
, 2));
749 /* store_bit_field expects START to be relative to
750 BYTES_BIG_ENDIAN and adjusts this value for machines with
751 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN. In order to be able to
752 invoke store_bit_field again it is necessary to have the START
753 value from the first call. */
754 if (BITS_BIG_ENDIAN
!= BYTES_BIG_ENDIAN
)
757 start
= BITS_PER_UNIT
- start
- size
;
760 gcc_assert (REG_P (op
));
761 start
= BITS_PER_WORD
- start
- size
;
765 gcc_assert (start
< (MEM_P (op
) ? BITS_PER_UNIT
: BITS_PER_WORD
));
766 store_bit_field (op
, size
, start
, GET_MODE (x
), y
);
770 switch (GET_RTX_CLASS (GET_CODE (y
)))
773 ot
= code_to_optab
[GET_CODE (y
)];
777 target
= expand_unop (GET_MODE (y
), ot
, XEXP (y
, 0), x
, 0);
778 if (target
!= NULL_RTX
)
781 emit_move_insn (x
, target
);
790 ot
= code_to_optab
[GET_CODE (y
)];
794 target
= expand_binop (GET_MODE (y
), ot
,
795 XEXP (y
, 0), XEXP (y
, 1),
797 if (target
!= NULL_RTX
)
800 emit_move_insn (x
, target
);
817 inner
= XEXP (outer
, 0);
818 outmode
= GET_MODE (outer
);
819 bitpos
= SUBREG_BYTE (outer
) * BITS_PER_UNIT
;
820 store_bit_field (inner
, GET_MODE_BITSIZE (outmode
), bitpos
, outmode
, y
);
823 /* Return sequence of instructions generated by if conversion. This
824 function calls end_sequence() to end the current stream, ensures
825 that are instructions are unshared, recognizable non-jump insns.
826 On failure, this function returns a NULL_RTX. */
829 end_ifcvt_sequence (struct noce_if_info
*if_info
)
832 rtx seq
= get_insns ();
834 set_used_flags (if_info
->x
);
835 set_used_flags (if_info
->cond
);
836 unshare_all_rtl_in_chain (seq
);
839 /* Make sure that all of the instructions emitted are recognizable,
840 and that we haven't introduced a new jump instruction.
841 As an exercise for the reader, build a general mechanism that
842 allows proper placement of required clobbers. */
843 for (insn
= seq
; insn
; insn
= NEXT_INSN (insn
))
845 || recog_memoized (insn
) == -1)
851 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
852 "if (a == b) x = a; else x = b" into "x = b". */
855 noce_try_move (struct noce_if_info
*if_info
)
857 rtx cond
= if_info
->cond
;
858 enum rtx_code code
= GET_CODE (cond
);
861 if (code
!= NE
&& code
!= EQ
)
864 /* This optimization isn't valid if either A or B could be a NaN
866 if (HONOR_NANS (GET_MODE (if_info
->x
))
867 || HONOR_SIGNED_ZEROS (GET_MODE (if_info
->x
)))
870 /* Check whether the operands of the comparison are A and in
872 if ((rtx_equal_p (if_info
->a
, XEXP (cond
, 0))
873 && rtx_equal_p (if_info
->b
, XEXP (cond
, 1)))
874 || (rtx_equal_p (if_info
->a
, XEXP (cond
, 1))
875 && rtx_equal_p (if_info
->b
, XEXP (cond
, 0))))
877 y
= (code
== EQ
) ? if_info
->a
: if_info
->b
;
879 /* Avoid generating the move if the source is the destination. */
880 if (! rtx_equal_p (if_info
->x
, y
))
883 noce_emit_move_insn (if_info
->x
, y
);
884 seq
= end_ifcvt_sequence (if_info
);
888 emit_insn_before_setloc (seq
, if_info
->jump
,
889 INSN_LOCATOR (if_info
->insn_a
));
896 /* Convert "if (test) x = 1; else x = 0".
898 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
899 tried in noce_try_store_flag_constants after noce_try_cmove has had
900 a go at the conversion. */
903 noce_try_store_flag (struct noce_if_info
*if_info
)
908 if (CONST_INT_P (if_info
->b
)
909 && INTVAL (if_info
->b
) == STORE_FLAG_VALUE
910 && if_info
->a
== const0_rtx
)
912 else if (if_info
->b
== const0_rtx
913 && CONST_INT_P (if_info
->a
)
914 && INTVAL (if_info
->a
) == STORE_FLAG_VALUE
915 && (reversed_comparison_code (if_info
->cond
, if_info
->jump
)
923 target
= noce_emit_store_flag (if_info
, if_info
->x
, reversep
, 0);
926 if (target
!= if_info
->x
)
927 noce_emit_move_insn (if_info
->x
, target
);
929 seq
= end_ifcvt_sequence (if_info
);
933 emit_insn_before_setloc (seq
, if_info
->jump
,
934 INSN_LOCATOR (if_info
->insn_a
));
944 /* Convert "if (test) x = a; else x = b", for A and B constant. */
947 noce_try_store_flag_constants (struct noce_if_info
*if_info
)
951 HOST_WIDE_INT itrue
, ifalse
, diff
, tmp
;
952 int normalize
, can_reverse
;
953 enum machine_mode mode
;
955 if (CONST_INT_P (if_info
->a
)
956 && CONST_INT_P (if_info
->b
))
958 mode
= GET_MODE (if_info
->x
);
959 ifalse
= INTVAL (if_info
->a
);
960 itrue
= INTVAL (if_info
->b
);
962 /* Make sure we can represent the difference between the two values. */
963 if ((itrue
- ifalse
> 0)
964 != ((ifalse
< 0) != (itrue
< 0) ? ifalse
< 0 : ifalse
< itrue
))
967 diff
= trunc_int_for_mode (itrue
- ifalse
, mode
);
969 can_reverse
= (reversed_comparison_code (if_info
->cond
, if_info
->jump
)
973 if (diff
== STORE_FLAG_VALUE
|| diff
== -STORE_FLAG_VALUE
)
975 else if (ifalse
== 0 && exact_log2 (itrue
) >= 0
976 && (STORE_FLAG_VALUE
== 1
977 || if_info
->branch_cost
>= 2))
979 else if (itrue
== 0 && exact_log2 (ifalse
) >= 0 && can_reverse
980 && (STORE_FLAG_VALUE
== 1 || if_info
->branch_cost
>= 2))
981 normalize
= 1, reversep
= 1;
983 && (STORE_FLAG_VALUE
== -1
984 || if_info
->branch_cost
>= 2))
986 else if (ifalse
== -1 && can_reverse
987 && (STORE_FLAG_VALUE
== -1 || if_info
->branch_cost
>= 2))
988 normalize
= -1, reversep
= 1;
989 else if ((if_info
->branch_cost
>= 2 && STORE_FLAG_VALUE
== -1)
990 || if_info
->branch_cost
>= 3)
997 tmp
= itrue
; itrue
= ifalse
; ifalse
= tmp
;
998 diff
= trunc_int_for_mode (-diff
, mode
);
1002 target
= noce_emit_store_flag (if_info
, if_info
->x
, reversep
, normalize
);
1009 /* if (test) x = 3; else x = 4;
1010 => x = 3 + (test == 0); */
1011 if (diff
== STORE_FLAG_VALUE
|| diff
== -STORE_FLAG_VALUE
)
1013 target
= expand_simple_binop (mode
,
1014 (diff
== STORE_FLAG_VALUE
1016 GEN_INT (ifalse
), target
, if_info
->x
, 0,
1020 /* if (test) x = 8; else x = 0;
1021 => x = (test != 0) << 3; */
1022 else if (ifalse
== 0 && (tmp
= exact_log2 (itrue
)) >= 0)
1024 target
= expand_simple_binop (mode
, ASHIFT
,
1025 target
, GEN_INT (tmp
), if_info
->x
, 0,
1029 /* if (test) x = -1; else x = b;
1030 => x = -(test != 0) | b; */
1031 else if (itrue
== -1)
1033 target
= expand_simple_binop (mode
, IOR
,
1034 target
, GEN_INT (ifalse
), if_info
->x
, 0,
1038 /* if (test) x = a; else x = b;
1039 => x = (-(test != 0) & (b - a)) + a; */
1042 target
= expand_simple_binop (mode
, AND
,
1043 target
, GEN_INT (diff
), if_info
->x
, 0,
1046 target
= expand_simple_binop (mode
, PLUS
,
1047 target
, GEN_INT (ifalse
),
1048 if_info
->x
, 0, OPTAB_WIDEN
);
1057 if (target
!= if_info
->x
)
1058 noce_emit_move_insn (if_info
->x
, target
);
1060 seq
= end_ifcvt_sequence (if_info
);
1064 emit_insn_before_setloc (seq
, if_info
->jump
,
1065 INSN_LOCATOR (if_info
->insn_a
));
1072 /* Convert "if (test) foo++" into "foo += (test != 0)", and
1073 similarly for "foo--". */
1076 noce_try_addcc (struct noce_if_info
*if_info
)
1079 int subtract
, normalize
;
1081 if (GET_CODE (if_info
->a
) == PLUS
1082 && rtx_equal_p (XEXP (if_info
->a
, 0), if_info
->b
)
1083 && (reversed_comparison_code (if_info
->cond
, if_info
->jump
)
1086 rtx cond
= if_info
->cond
;
1087 enum rtx_code code
= reversed_comparison_code (cond
, if_info
->jump
);
1089 /* First try to use addcc pattern. */
1090 if (general_operand (XEXP (cond
, 0), VOIDmode
)
1091 && general_operand (XEXP (cond
, 1), VOIDmode
))
1094 target
= emit_conditional_add (if_info
->x
, code
,
1099 XEXP (if_info
->a
, 1),
1100 GET_MODE (if_info
->x
),
1101 (code
== LTU
|| code
== GEU
1102 || code
== LEU
|| code
== GTU
));
1105 if (target
!= if_info
->x
)
1106 noce_emit_move_insn (if_info
->x
, target
);
1108 seq
= end_ifcvt_sequence (if_info
);
1112 emit_insn_before_setloc (seq
, if_info
->jump
,
1113 INSN_LOCATOR (if_info
->insn_a
));
1119 /* If that fails, construct conditional increment or decrement using
1121 if (if_info
->branch_cost
>= 2
1122 && (XEXP (if_info
->a
, 1) == const1_rtx
1123 || XEXP (if_info
->a
, 1) == constm1_rtx
))
1126 if (STORE_FLAG_VALUE
== INTVAL (XEXP (if_info
->a
, 1)))
1127 subtract
= 0, normalize
= 0;
1128 else if (-STORE_FLAG_VALUE
== INTVAL (XEXP (if_info
->a
, 1)))
1129 subtract
= 1, normalize
= 0;
1131 subtract
= 0, normalize
= INTVAL (XEXP (if_info
->a
, 1));
1134 target
= noce_emit_store_flag (if_info
,
1135 gen_reg_rtx (GET_MODE (if_info
->x
)),
1139 target
= expand_simple_binop (GET_MODE (if_info
->x
),
1140 subtract
? MINUS
: PLUS
,
1141 if_info
->b
, target
, if_info
->x
,
1145 if (target
!= if_info
->x
)
1146 noce_emit_move_insn (if_info
->x
, target
);
1148 seq
= end_ifcvt_sequence (if_info
);
1152 emit_insn_before_setloc (seq
, if_info
->jump
,
1153 INSN_LOCATOR (if_info
->insn_a
));
1163 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
1166 noce_try_store_flag_mask (struct noce_if_info
*if_info
)
1172 if ((if_info
->branch_cost
>= 2
1173 || STORE_FLAG_VALUE
== -1)
1174 && ((if_info
->a
== const0_rtx
1175 && rtx_equal_p (if_info
->b
, if_info
->x
))
1176 || ((reversep
= (reversed_comparison_code (if_info
->cond
,
1179 && if_info
->b
== const0_rtx
1180 && rtx_equal_p (if_info
->a
, if_info
->x
))))
1183 target
= noce_emit_store_flag (if_info
,
1184 gen_reg_rtx (GET_MODE (if_info
->x
)),
1187 target
= expand_simple_binop (GET_MODE (if_info
->x
), AND
,
1189 target
, if_info
->x
, 0,
1194 if (target
!= if_info
->x
)
1195 noce_emit_move_insn (if_info
->x
, target
);
1197 seq
= end_ifcvt_sequence (if_info
);
1201 emit_insn_before_setloc (seq
, if_info
->jump
,
1202 INSN_LOCATOR (if_info
->insn_a
));
1212 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
1215 noce_emit_cmove (struct noce_if_info
*if_info
, rtx x
, enum rtx_code code
,
1216 rtx cmp_a
, rtx cmp_b
, rtx vfalse
, rtx vtrue
)
1218 /* If earliest == jump, try to build the cmove insn directly.
1219 This is helpful when combine has created some complex condition
1220 (like for alpha's cmovlbs) that we can't hope to regenerate
1221 through the normal interface. */
1223 if (if_info
->cond_earliest
== if_info
->jump
)
1227 tmp
= gen_rtx_fmt_ee (code
, GET_MODE (if_info
->cond
), cmp_a
, cmp_b
);
1228 tmp
= gen_rtx_IF_THEN_ELSE (GET_MODE (x
), tmp
, vtrue
, vfalse
);
1229 tmp
= gen_rtx_SET (VOIDmode
, x
, tmp
);
1232 tmp
= emit_insn (tmp
);
1234 if (recog_memoized (tmp
) >= 0)
1246 /* Don't even try if the comparison operands are weird. */
1247 if (! general_operand (cmp_a
, GET_MODE (cmp_a
))
1248 || ! general_operand (cmp_b
, GET_MODE (cmp_b
)))
1251 #if HAVE_conditional_move
1252 return emit_conditional_move (x
, code
, cmp_a
, cmp_b
, VOIDmode
,
1253 vtrue
, vfalse
, GET_MODE (x
),
1254 (code
== LTU
|| code
== GEU
1255 || code
== LEU
|| code
== GTU
));
1257 /* We'll never get here, as noce_process_if_block doesn't call the
1258 functions involved. Ifdef code, however, should be discouraged
1259 because it leads to typos in the code not selected. However,
1260 emit_conditional_move won't exist either. */
1265 /* Try only simple constants and registers here. More complex cases
1266 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1267 has had a go at it. */
1270 noce_try_cmove (struct noce_if_info
*if_info
)
1275 if ((CONSTANT_P (if_info
->a
) || register_operand (if_info
->a
, VOIDmode
))
1276 && (CONSTANT_P (if_info
->b
) || register_operand (if_info
->b
, VOIDmode
)))
1280 code
= GET_CODE (if_info
->cond
);
1281 target
= noce_emit_cmove (if_info
, if_info
->x
, code
,
1282 XEXP (if_info
->cond
, 0),
1283 XEXP (if_info
->cond
, 1),
1284 if_info
->a
, if_info
->b
);
1288 if (target
!= if_info
->x
)
1289 noce_emit_move_insn (if_info
->x
, target
);
1291 seq
= end_ifcvt_sequence (if_info
);
1295 emit_insn_before_setloc (seq
, if_info
->jump
,
1296 INSN_LOCATOR (if_info
->insn_a
));
1309 /* Try more complex cases involving conditional_move. */
1312 noce_try_cmove_arith (struct noce_if_info
*if_info
)
1324 /* A conditional move from two memory sources is equivalent to a
1325 conditional on their addresses followed by a load. Don't do this
1326 early because it'll screw alias analysis. Note that we've
1327 already checked for no side effects. */
1328 /* ??? FIXME: Magic number 5. */
1329 if (cse_not_expected
1330 && MEM_P (a
) && MEM_P (b
)
1331 && if_info
->branch_cost
>= 5)
1335 x
= gen_reg_rtx (Pmode
);
1339 /* ??? We could handle this if we knew that a load from A or B could
1340 not fault. This is also true if we've already loaded
1341 from the address along the path from ENTRY. */
1342 else if (may_trap_p (a
) || may_trap_p (b
))
1345 /* if (test) x = a + b; else x = c - d;
1352 code
= GET_CODE (if_info
->cond
);
1353 insn_a
= if_info
->insn_a
;
1354 insn_b
= if_info
->insn_b
;
1356 /* Total insn_rtx_cost should be smaller than branch cost. Exit
1357 if insn_rtx_cost can't be estimated. */
1360 insn_cost
= insn_rtx_cost (PATTERN (insn_a
),
1361 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_a
)));
1362 if (insn_cost
== 0 || insn_cost
> COSTS_N_INSNS (if_info
->branch_cost
))
1370 insn_cost
+= insn_rtx_cost (PATTERN (insn_b
),
1371 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_b
)));
1372 if (insn_cost
== 0 || insn_cost
> COSTS_N_INSNS (if_info
->branch_cost
))
1376 /* Possibly rearrange operands to make things come out more natural. */
1377 if (reversed_comparison_code (if_info
->cond
, if_info
->jump
) != UNKNOWN
)
1380 if (rtx_equal_p (b
, x
))
1382 else if (general_operand (b
, GET_MODE (b
)))
1387 code
= reversed_comparison_code (if_info
->cond
, if_info
->jump
);
1388 tmp
= a
, a
= b
, b
= tmp
;
1389 tmp
= insn_a
, insn_a
= insn_b
, insn_b
= tmp
;
1398 /* If either operand is complex, load it into a register first.
1399 The best way to do this is to copy the original insn. In this
1400 way we preserve any clobbers etc that the insn may have had.
1401 This is of course not possible in the IS_MEM case. */
1402 if (! general_operand (a
, GET_MODE (a
)))
1408 tmp
= gen_reg_rtx (GET_MODE (a
));
1409 tmp
= emit_insn (gen_rtx_SET (VOIDmode
, tmp
, a
));
1412 goto end_seq_and_fail
;
1415 a
= gen_reg_rtx (GET_MODE (a
));
1416 tmp
= copy_rtx (insn_a
);
1417 set
= single_set (tmp
);
1419 tmp
= emit_insn (PATTERN (tmp
));
1421 if (recog_memoized (tmp
) < 0)
1422 goto end_seq_and_fail
;
1424 if (! general_operand (b
, GET_MODE (b
)))
1430 tmp
= gen_reg_rtx (GET_MODE (b
));
1431 tmp
= gen_rtx_SET (VOIDmode
, tmp
, b
);
1434 goto end_seq_and_fail
;
1437 b
= gen_reg_rtx (GET_MODE (b
));
1438 tmp
= copy_rtx (insn_b
);
1439 set
= single_set (tmp
);
1441 tmp
= PATTERN (tmp
);
1444 /* If insn to set up A clobbers any registers B depends on, try to
1445 swap insn that sets up A with the one that sets up B. If even
1446 that doesn't help, punt. */
1447 last
= get_last_insn ();
1448 if (last
&& modified_in_p (orig_b
, last
))
1450 tmp
= emit_insn_before (tmp
, get_insns ());
1451 if (modified_in_p (orig_a
, tmp
))
1452 goto end_seq_and_fail
;
1455 tmp
= emit_insn (tmp
);
1457 if (recog_memoized (tmp
) < 0)
1458 goto end_seq_and_fail
;
1461 target
= noce_emit_cmove (if_info
, x
, code
, XEXP (if_info
->cond
, 0),
1462 XEXP (if_info
->cond
, 1), a
, b
);
1465 goto end_seq_and_fail
;
1467 /* If we're handling a memory for above, emit the load now. */
1470 tmp
= gen_rtx_MEM (GET_MODE (if_info
->x
), target
);
1472 /* Copy over flags as appropriate. */
1473 if (MEM_VOLATILE_P (if_info
->a
) || MEM_VOLATILE_P (if_info
->b
))
1474 MEM_VOLATILE_P (tmp
) = 1;
1475 if (MEM_IN_STRUCT_P (if_info
->a
) && MEM_IN_STRUCT_P (if_info
->b
))
1476 MEM_IN_STRUCT_P (tmp
) = 1;
1477 if (MEM_SCALAR_P (if_info
->a
) && MEM_SCALAR_P (if_info
->b
))
1478 MEM_SCALAR_P (tmp
) = 1;
1479 if (MEM_ALIAS_SET (if_info
->a
) == MEM_ALIAS_SET (if_info
->b
))
1480 set_mem_alias_set (tmp
, MEM_ALIAS_SET (if_info
->a
));
1482 MIN (MEM_ALIGN (if_info
->a
), MEM_ALIGN (if_info
->b
)));
1484 noce_emit_move_insn (if_info
->x
, tmp
);
1486 else if (target
!= x
)
1487 noce_emit_move_insn (x
, target
);
1489 tmp
= end_ifcvt_sequence (if_info
);
1493 emit_insn_before_setloc (tmp
, if_info
->jump
, INSN_LOCATOR (if_info
->insn_a
));
1501 /* For most cases, the simplified condition we found is the best
1502 choice, but this is not the case for the min/max/abs transforms.
1503 For these we wish to know that it is A or B in the condition. */
1506 noce_get_alt_condition (struct noce_if_info
*if_info
, rtx target
,
1509 rtx cond
, set
, insn
;
1512 /* If target is already mentioned in the known condition, return it. */
1513 if (reg_mentioned_p (target
, if_info
->cond
))
1515 *earliest
= if_info
->cond_earliest
;
1516 return if_info
->cond
;
1519 set
= pc_set (if_info
->jump
);
1520 cond
= XEXP (SET_SRC (set
), 0);
1522 = GET_CODE (XEXP (SET_SRC (set
), 2)) == LABEL_REF
1523 && XEXP (XEXP (SET_SRC (set
), 2), 0) == JUMP_LABEL (if_info
->jump
);
1524 if (if_info
->then_else_reversed
)
1527 /* If we're looking for a constant, try to make the conditional
1528 have that constant in it. There are two reasons why it may
1529 not have the constant we want:
1531 1. GCC may have needed to put the constant in a register, because
1532 the target can't compare directly against that constant. For
1533 this case, we look for a SET immediately before the comparison
1534 that puts a constant in that register.
1536 2. GCC may have canonicalized the conditional, for example
1537 replacing "if x < 4" with "if x <= 3". We can undo that (or
1538 make equivalent types of changes) to get the constants we need
1539 if they're off by one in the right direction. */
1541 if (CONST_INT_P (target
))
1543 enum rtx_code code
= GET_CODE (if_info
->cond
);
1544 rtx op_a
= XEXP (if_info
->cond
, 0);
1545 rtx op_b
= XEXP (if_info
->cond
, 1);
1548 /* First, look to see if we put a constant in a register. */
1549 prev_insn
= prev_nonnote_insn (if_info
->cond_earliest
);
1551 && BLOCK_NUM (prev_insn
) == BLOCK_NUM (if_info
->cond_earliest
)
1552 && INSN_P (prev_insn
)
1553 && GET_CODE (PATTERN (prev_insn
)) == SET
)
1555 rtx src
= find_reg_equal_equiv_note (prev_insn
);
1557 src
= SET_SRC (PATTERN (prev_insn
));
1558 if (CONST_INT_P (src
))
1560 if (rtx_equal_p (op_a
, SET_DEST (PATTERN (prev_insn
))))
1562 else if (rtx_equal_p (op_b
, SET_DEST (PATTERN (prev_insn
))))
1565 if (CONST_INT_P (op_a
))
1570 code
= swap_condition (code
);
1575 /* Now, look to see if we can get the right constant by
1576 adjusting the conditional. */
1577 if (CONST_INT_P (op_b
))
1579 HOST_WIDE_INT desired_val
= INTVAL (target
);
1580 HOST_WIDE_INT actual_val
= INTVAL (op_b
);
1585 if (actual_val
== desired_val
+ 1)
1588 op_b
= GEN_INT (desired_val
);
1592 if (actual_val
== desired_val
- 1)
1595 op_b
= GEN_INT (desired_val
);
1599 if (actual_val
== desired_val
- 1)
1602 op_b
= GEN_INT (desired_val
);
1606 if (actual_val
== desired_val
+ 1)
1609 op_b
= GEN_INT (desired_val
);
1617 /* If we made any changes, generate a new conditional that is
1618 equivalent to what we started with, but has the right
1620 if (code
!= GET_CODE (if_info
->cond
)
1621 || op_a
!= XEXP (if_info
->cond
, 0)
1622 || op_b
!= XEXP (if_info
->cond
, 1))
1624 cond
= gen_rtx_fmt_ee (code
, GET_MODE (cond
), op_a
, op_b
);
1625 *earliest
= if_info
->cond_earliest
;
1630 cond
= canonicalize_condition (if_info
->jump
, cond
, reverse
,
1631 earliest
, target
, false, true);
1632 if (! cond
|| ! reg_mentioned_p (target
, cond
))
1635 /* We almost certainly searched back to a different place.
1636 Need to re-verify correct lifetimes. */
1638 /* X may not be mentioned in the range (cond_earliest, jump]. */
1639 for (insn
= if_info
->jump
; insn
!= *earliest
; insn
= PREV_INSN (insn
))
1640 if (INSN_P (insn
) && reg_overlap_mentioned_p (if_info
->x
, PATTERN (insn
)))
1643 /* A and B may not be modified in the range [cond_earliest, jump). */
1644 for (insn
= *earliest
; insn
!= if_info
->jump
; insn
= NEXT_INSN (insn
))
1646 && (modified_in_p (if_info
->a
, insn
)
1647 || modified_in_p (if_info
->b
, insn
)))
1653 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1656 noce_try_minmax (struct noce_if_info
*if_info
)
1658 rtx cond
, earliest
, target
, seq
;
1659 enum rtx_code code
, op
;
1662 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1663 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1664 to get the target to tell us... */
1665 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info
->x
))
1666 || HONOR_NANS (GET_MODE (if_info
->x
)))
1669 cond
= noce_get_alt_condition (if_info
, if_info
->a
, &earliest
);
1673 /* Verify the condition is of the form we expect, and canonicalize
1674 the comparison code. */
1675 code
= GET_CODE (cond
);
1676 if (rtx_equal_p (XEXP (cond
, 0), if_info
->a
))
1678 if (! rtx_equal_p (XEXP (cond
, 1), if_info
->b
))
1681 else if (rtx_equal_p (XEXP (cond
, 1), if_info
->a
))
1683 if (! rtx_equal_p (XEXP (cond
, 0), if_info
->b
))
1685 code
= swap_condition (code
);
1690 /* Determine what sort of operation this is. Note that the code is for
1691 a taken branch, so the code->operation mapping appears backwards. */
1724 target
= expand_simple_binop (GET_MODE (if_info
->x
), op
,
1725 if_info
->a
, if_info
->b
,
1726 if_info
->x
, unsignedp
, OPTAB_WIDEN
);
1732 if (target
!= if_info
->x
)
1733 noce_emit_move_insn (if_info
->x
, target
);
1735 seq
= end_ifcvt_sequence (if_info
);
1739 emit_insn_before_setloc (seq
, if_info
->jump
, INSN_LOCATOR (if_info
->insn_a
));
1740 if_info
->cond
= cond
;
1741 if_info
->cond_earliest
= earliest
;
1746 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1749 noce_try_abs (struct noce_if_info
*if_info
)
1751 rtx cond
, earliest
, target
, seq
, a
, b
, c
;
1754 /* Reject modes with signed zeros. */
1755 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info
->x
)))
1758 /* Recognize A and B as constituting an ABS or NABS. The canonical
1759 form is a branch around the negation, taken when the object is the
1760 first operand of a comparison against 0 that evaluates to true. */
1763 if (GET_CODE (a
) == NEG
&& rtx_equal_p (XEXP (a
, 0), b
))
1765 else if (GET_CODE (b
) == NEG
&& rtx_equal_p (XEXP (b
, 0), a
))
1767 c
= a
; a
= b
; b
= c
;
1773 cond
= noce_get_alt_condition (if_info
, b
, &earliest
);
1777 /* Verify the condition is of the form we expect. */
1778 if (rtx_equal_p (XEXP (cond
, 0), b
))
1780 else if (rtx_equal_p (XEXP (cond
, 1), b
))
1788 /* Verify that C is zero. Search one step backward for a
1789 REG_EQUAL note or a simple source if necessary. */
1792 rtx set
, insn
= prev_nonnote_insn (earliest
);
1794 && BLOCK_NUM (insn
) == BLOCK_NUM (earliest
)
1795 && (set
= single_set (insn
))
1796 && rtx_equal_p (SET_DEST (set
), c
))
1798 rtx note
= find_reg_equal_equiv_note (insn
);
1808 && GET_CODE (XEXP (c
, 0)) == SYMBOL_REF
1809 && CONSTANT_POOL_ADDRESS_P (XEXP (c
, 0)))
1810 c
= get_pool_constant (XEXP (c
, 0));
1812 /* Work around funny ideas get_condition has wrt canonicalization.
1813 Note that these rtx constants are known to be CONST_INT, and
1814 therefore imply integer comparisons. */
1815 if (c
== constm1_rtx
&& GET_CODE (cond
) == GT
)
1817 else if (c
== const1_rtx
&& GET_CODE (cond
) == LT
)
1819 else if (c
!= CONST0_RTX (GET_MODE (b
)))
1822 /* Determine what sort of operation this is. */
1823 switch (GET_CODE (cond
))
1842 target
= expand_abs_nojump (GET_MODE (if_info
->x
), b
, if_info
->x
, 1);
1844 /* ??? It's a quandary whether cmove would be better here, especially
1845 for integers. Perhaps combine will clean things up. */
1846 if (target
&& negate
)
1847 target
= expand_simple_unop (GET_MODE (target
), NEG
, target
, if_info
->x
, 0);
1855 if (target
!= if_info
->x
)
1856 noce_emit_move_insn (if_info
->x
, target
);
1858 seq
= end_ifcvt_sequence (if_info
);
1862 emit_insn_before_setloc (seq
, if_info
->jump
, INSN_LOCATOR (if_info
->insn_a
));
1863 if_info
->cond
= cond
;
1864 if_info
->cond_earliest
= earliest
;
1869 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;". */
1872 noce_try_sign_mask (struct noce_if_info
*if_info
)
1874 rtx cond
, t
, m
, c
, seq
;
1875 enum machine_mode mode
;
1877 bool t_unconditional
;
1879 cond
= if_info
->cond
;
1880 code
= GET_CODE (cond
);
1885 if (if_info
->a
== const0_rtx
)
1887 if ((code
== LT
&& c
== const0_rtx
)
1888 || (code
== LE
&& c
== constm1_rtx
))
1891 else if (if_info
->b
== const0_rtx
)
1893 if ((code
== GE
&& c
== const0_rtx
)
1894 || (code
== GT
&& c
== constm1_rtx
))
1898 if (! t
|| side_effects_p (t
))
1901 /* We currently don't handle different modes. */
1902 mode
= GET_MODE (t
);
1903 if (GET_MODE (m
) != mode
)
1906 /* This is only profitable if T is unconditionally executed/evaluated in the
1907 original insn sequence or T is cheap. The former happens if B is the
1908 non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
1909 INSN_B which can happen for e.g. conditional stores to memory. For the
1910 cost computation use the block TEST_BB where the evaluation will end up
1911 after the transformation. */
1914 && (if_info
->insn_b
== NULL_RTX
1915 || BLOCK_FOR_INSN (if_info
->insn_b
) == if_info
->test_bb
));
1916 if (!(t_unconditional
1917 || (rtx_cost (t
, SET
, optimize_bb_for_speed_p (if_info
->test_bb
))
1918 < COSTS_N_INSNS (2))))
1922 /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
1923 "(signed) m >> 31" directly. This benefits targets with specialized
1924 insns to obtain the signmask, but still uses ashr_optab otherwise. */
1925 m
= emit_store_flag (gen_reg_rtx (mode
), LT
, m
, const0_rtx
, mode
, 0, -1);
1926 t
= m
? expand_binop (mode
, and_optab
, m
, t
, NULL_RTX
, 0, OPTAB_DIRECT
)
1935 noce_emit_move_insn (if_info
->x
, t
);
1937 seq
= end_ifcvt_sequence (if_info
);
1941 emit_insn_before_setloc (seq
, if_info
->jump
, INSN_LOCATOR (if_info
->insn_a
));
1946 /* Optimize away "if (x & C) x |= C" and similar bit manipulation
1950 noce_try_bitop (struct noce_if_info
*if_info
)
1952 rtx cond
, x
, a
, result
, seq
;
1953 enum machine_mode mode
;
1958 cond
= if_info
->cond
;
1959 code
= GET_CODE (cond
);
1961 /* Check for no else condition. */
1962 if (! rtx_equal_p (x
, if_info
->b
))
1965 /* Check for a suitable condition. */
1966 if (code
!= NE
&& code
!= EQ
)
1968 if (XEXP (cond
, 1) != const0_rtx
)
1970 cond
= XEXP (cond
, 0);
1972 /* ??? We could also handle AND here. */
1973 if (GET_CODE (cond
) == ZERO_EXTRACT
)
1975 if (XEXP (cond
, 1) != const1_rtx
1976 || !CONST_INT_P (XEXP (cond
, 2))
1977 || ! rtx_equal_p (x
, XEXP (cond
, 0)))
1979 bitnum
= INTVAL (XEXP (cond
, 2));
1980 mode
= GET_MODE (x
);
1981 if (BITS_BIG_ENDIAN
)
1982 bitnum
= GET_MODE_BITSIZE (mode
) - 1 - bitnum
;
1983 if (bitnum
< 0 || bitnum
>= HOST_BITS_PER_WIDE_INT
)
1990 if (GET_CODE (a
) == IOR
|| GET_CODE (a
) == XOR
)
1992 /* Check for "if (X & C) x = x op C". */
1993 if (! rtx_equal_p (x
, XEXP (a
, 0))
1994 || !CONST_INT_P (XEXP (a
, 1))
1995 || (INTVAL (XEXP (a
, 1)) & GET_MODE_MASK (mode
))
1996 != (unsigned HOST_WIDE_INT
) 1 << bitnum
)
1999 /* if ((x & C) == 0) x |= C; is transformed to x |= C. */
2000 /* if ((x & C) != 0) x |= C; is transformed to nothing. */
2001 if (GET_CODE (a
) == IOR
)
2002 result
= (code
== NE
) ? a
: NULL_RTX
;
2003 else if (code
== NE
)
2005 /* if ((x & C) == 0) x ^= C; is transformed to x |= C. */
2006 result
= gen_int_mode ((HOST_WIDE_INT
) 1 << bitnum
, mode
);
2007 result
= simplify_gen_binary (IOR
, mode
, x
, result
);
2011 /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C. */
2012 result
= gen_int_mode (~((HOST_WIDE_INT
) 1 << bitnum
), mode
);
2013 result
= simplify_gen_binary (AND
, mode
, x
, result
);
2016 else if (GET_CODE (a
) == AND
)
2018 /* Check for "if (X & C) x &= ~C". */
2019 if (! rtx_equal_p (x
, XEXP (a
, 0))
2020 || !CONST_INT_P (XEXP (a
, 1))
2021 || (INTVAL (XEXP (a
, 1)) & GET_MODE_MASK (mode
))
2022 != (~((HOST_WIDE_INT
) 1 << bitnum
) & GET_MODE_MASK (mode
)))
2025 /* if ((x & C) == 0) x &= ~C; is transformed to nothing. */
2026 /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C. */
2027 result
= (code
== EQ
) ? a
: NULL_RTX
;
2035 noce_emit_move_insn (x
, result
);
2036 seq
= end_ifcvt_sequence (if_info
);
2040 emit_insn_before_setloc (seq
, if_info
->jump
,
2041 INSN_LOCATOR (if_info
->insn_a
));
2047 /* Similar to get_condition, only the resulting condition must be
2048 valid at JUMP, instead of at EARLIEST.
2050 If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
2051 THEN block of the caller, and we have to reverse the condition. */
2054 noce_get_condition (rtx jump
, rtx
*earliest
, bool then_else_reversed
)
2059 if (! any_condjump_p (jump
))
2062 set
= pc_set (jump
);
2064 /* If this branches to JUMP_LABEL when the condition is false,
2065 reverse the condition. */
2066 reverse
= (GET_CODE (XEXP (SET_SRC (set
), 2)) == LABEL_REF
2067 && XEXP (XEXP (SET_SRC (set
), 2), 0) == JUMP_LABEL (jump
));
2069 /* We may have to reverse because the caller's if block is not canonical,
2070 i.e. the THEN block isn't the fallthrough block for the TEST block
2071 (see find_if_header). */
2072 if (then_else_reversed
)
2075 /* If the condition variable is a register and is MODE_INT, accept it. */
2077 cond
= XEXP (SET_SRC (set
), 0);
2078 tmp
= XEXP (cond
, 0);
2079 if (REG_P (tmp
) && GET_MODE_CLASS (GET_MODE (tmp
)) == MODE_INT
)
2084 cond
= gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond
)),
2085 GET_MODE (cond
), tmp
, XEXP (cond
, 1));
2089 /* Otherwise, fall back on canonicalize_condition to do the dirty
2090 work of manipulating MODE_CC values and COMPARE rtx codes. */
2091 return canonicalize_condition (jump
, cond
, reverse
, earliest
,
2092 NULL_RTX
, false, true);
2095 /* Return true if OP is ok for if-then-else processing. */
2098 noce_operand_ok (const_rtx op
)
2100 /* We special-case memories, so handle any of them with
2101 no address side effects. */
2103 return ! side_effects_p (XEXP (op
, 0));
2105 if (side_effects_p (op
))
2108 return ! may_trap_p (op
);
2111 /* Return true if a write into MEM may trap or fault. */
2114 noce_mem_write_may_trap_or_fault_p (const_rtx mem
)
2118 if (MEM_READONLY_P (mem
))
2121 if (may_trap_or_fault_p (mem
))
2124 addr
= XEXP (mem
, 0);
2126 /* Call target hook to avoid the effects of -fpic etc.... */
2127 addr
= targetm
.delegitimize_address (addr
);
2130 switch (GET_CODE (addr
))
2138 addr
= XEXP (addr
, 0);
2142 addr
= XEXP (addr
, 1);
2145 if (CONST_INT_P (XEXP (addr
, 1)))
2146 addr
= XEXP (addr
, 0);
2153 if (SYMBOL_REF_DECL (addr
)
2154 && decl_readonly_section (SYMBOL_REF_DECL (addr
), 0))
2164 /* Return whether we can use store speculation for MEM. TOP_BB is the
2165 basic block above the conditional block where we are considering
2166 doing the speculative store. We look for whether MEM is set
2167 unconditionally later in the function. */
2170 noce_can_store_speculate_p (basic_block top_bb
, const_rtx mem
)
2172 basic_block dominator
;
2174 for (dominator
= get_immediate_dominator (CDI_POST_DOMINATORS
, top_bb
);
2176 dominator
= get_immediate_dominator (CDI_POST_DOMINATORS
, dominator
))
2180 FOR_BB_INSNS (dominator
, insn
)
2182 /* If we see something that might be a memory barrier, we
2183 have to stop looking. Even if the MEM is set later in
2184 the function, we still don't want to set it
2185 unconditionally before the barrier. */
2187 && (volatile_insn_p (PATTERN (insn
))
2188 || (CALL_P (insn
) && (!RTL_CONST_CALL_P (insn
)))))
2191 if (memory_modified_in_insn_p (mem
, insn
))
2193 if (modified_in_p (XEXP (mem
, 0), insn
))
2202 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2203 it without using conditional execution. Return TRUE if we were successful
2204 at converting the block. */
2207 noce_process_if_block (struct noce_if_info
*if_info
)
2209 basic_block test_bb
= if_info
->test_bb
; /* test block */
2210 basic_block then_bb
= if_info
->then_bb
; /* THEN */
2211 basic_block else_bb
= if_info
->else_bb
; /* ELSE or NULL */
2212 basic_block join_bb
= if_info
->join_bb
; /* JOIN */
2213 rtx jump
= if_info
->jump
;
2214 rtx cond
= if_info
->cond
;
2217 rtx orig_x
, x
, a
, b
;
2219 /* We're looking for patterns of the form
2221 (1) if (...) x = a; else x = b;
2222 (2) x = b; if (...) x = a;
2223 (3) if (...) x = a; // as if with an initial x = x.
2225 The later patterns require jumps to be more expensive.
2227 ??? For future expansion, look for multiple X in such patterns. */
2229 /* Look for one of the potential sets. */
2230 insn_a
= first_active_insn (then_bb
);
2232 || insn_a
!= last_active_insn (then_bb
, FALSE
)
2233 || (set_a
= single_set (insn_a
)) == NULL_RTX
)
2236 x
= SET_DEST (set_a
);
2237 a
= SET_SRC (set_a
);
2239 /* Look for the other potential set. Make sure we've got equivalent
2241 /* ??? This is overconservative. Storing to two different mems is
2242 as easy as conditionally computing the address. Storing to a
2243 single mem merely requires a scratch memory to use as one of the
2244 destination addresses; often the memory immediately below the
2245 stack pointer is available for this. */
2249 insn_b
= first_active_insn (else_bb
);
2251 || insn_b
!= last_active_insn (else_bb
, FALSE
)
2252 || (set_b
= single_set (insn_b
)) == NULL_RTX
2253 || ! rtx_equal_p (x
, SET_DEST (set_b
)))
2258 insn_b
= prev_nonnote_insn (if_info
->cond_earliest
);
2259 /* We're going to be moving the evaluation of B down from above
2260 COND_EARLIEST to JUMP. Make sure the relevant data is still
2263 || BLOCK_NUM (insn_b
) != BLOCK_NUM (if_info
->cond_earliest
)
2264 || !NONJUMP_INSN_P (insn_b
)
2265 || (set_b
= single_set (insn_b
)) == NULL_RTX
2266 || ! rtx_equal_p (x
, SET_DEST (set_b
))
2267 || ! noce_operand_ok (SET_SRC (set_b
))
2268 || reg_overlap_mentioned_p (x
, SET_SRC (set_b
))
2269 || modified_between_p (SET_SRC (set_b
),
2270 PREV_INSN (if_info
->cond_earliest
), jump
)
2271 /* Likewise with X. In particular this can happen when
2272 noce_get_condition looks farther back in the instruction
2273 stream than one might expect. */
2274 || reg_overlap_mentioned_p (x
, cond
)
2275 || reg_overlap_mentioned_p (x
, a
)
2276 || modified_between_p (x
, PREV_INSN (if_info
->cond_earliest
), jump
))
2277 insn_b
= set_b
= NULL_RTX
;
2280 /* If x has side effects then only the if-then-else form is safe to
2281 convert. But even in that case we would need to restore any notes
2282 (such as REG_INC) at then end. That can be tricky if
2283 noce_emit_move_insn expands to more than one insn, so disable the
2284 optimization entirely for now if there are side effects. */
2285 if (side_effects_p (x
))
2288 b
= (set_b
? SET_SRC (set_b
) : x
);
2290 /* Only operate on register destinations, and even then avoid extending
2291 the lifetime of hard registers on small register class machines. */
2294 || (SMALL_REGISTER_CLASSES
2295 && REGNO (x
) < FIRST_PSEUDO_REGISTER
))
2297 if (GET_MODE (x
) == BLKmode
)
2300 if (GET_CODE (x
) == ZERO_EXTRACT
2301 && (!CONST_INT_P (XEXP (x
, 1))
2302 || !CONST_INT_P (XEXP (x
, 2))))
2305 x
= gen_reg_rtx (GET_MODE (GET_CODE (x
) == STRICT_LOW_PART
2306 ? XEXP (x
, 0) : x
));
2309 /* Don't operate on sources that may trap or are volatile. */
2310 if (! noce_operand_ok (a
) || ! noce_operand_ok (b
))
2314 /* Set up the info block for our subroutines. */
2315 if_info
->insn_a
= insn_a
;
2316 if_info
->insn_b
= insn_b
;
2321 /* Try optimizations in some approximation of a useful order. */
2322 /* ??? Should first look to see if X is live incoming at all. If it
2323 isn't, we don't need anything but an unconditional set. */
2325 /* Look and see if A and B are really the same. Avoid creating silly
2326 cmove constructs that no one will fix up later. */
2327 if (rtx_equal_p (a
, b
))
2329 /* If we have an INSN_B, we don't have to create any new rtl. Just
2330 move the instruction that we already have. If we don't have an
2331 INSN_B, that means that A == X, and we've got a noop move. In
2332 that case don't do anything and let the code below delete INSN_A. */
2333 if (insn_b
&& else_bb
)
2337 if (else_bb
&& insn_b
== BB_END (else_bb
))
2338 BB_END (else_bb
) = PREV_INSN (insn_b
);
2339 reorder_insns (insn_b
, insn_b
, PREV_INSN (jump
));
2341 /* If there was a REG_EQUAL note, delete it since it may have been
2342 true due to this insn being after a jump. */
2343 if ((note
= find_reg_note (insn_b
, REG_EQUAL
, NULL_RTX
)) != 0)
2344 remove_note (insn_b
, note
);
2348 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
2349 x must be executed twice. */
2350 else if (insn_b
&& side_effects_p (orig_x
))
2357 if (!set_b
&& MEM_P (orig_x
))
2359 /* Disallow the "if (...) x = a;" form (implicit "else x = x;")
2360 for optimizations if writing to x may trap or fault,
2361 i.e. it's a memory other than a static var or a stack slot,
2362 is misaligned on strict aligned machines or is read-only. If
2363 x is a read-only memory, then the program is valid only if we
2364 avoid the store into it. If there are stores on both the
2365 THEN and ELSE arms, then we can go ahead with the conversion;
2366 either the program is broken, or the condition is always
2367 false such that the other memory is selected. */
2368 if (noce_mem_write_may_trap_or_fault_p (orig_x
))
2371 /* Avoid store speculation: given "if (...) x = a" where x is a
2372 MEM, we only want to do the store if x is always set
2373 somewhere in the function. This avoids cases like
2374 if (pthread_mutex_trylock(mutex))
2376 where we only want global_variable to be changed if the mutex
2377 is held. FIXME: This should ideally be expressed directly in
2379 if (!noce_can_store_speculate_p (test_bb
, orig_x
))
2383 if (noce_try_move (if_info
))
2385 if (noce_try_store_flag (if_info
))
2387 if (noce_try_bitop (if_info
))
2389 if (noce_try_minmax (if_info
))
2391 if (noce_try_abs (if_info
))
2393 if (HAVE_conditional_move
2394 && noce_try_cmove (if_info
))
2396 if (! HAVE_conditional_execution
)
2398 if (noce_try_store_flag_constants (if_info
))
2400 if (noce_try_addcc (if_info
))
2402 if (noce_try_store_flag_mask (if_info
))
2404 if (HAVE_conditional_move
2405 && noce_try_cmove_arith (if_info
))
2407 if (noce_try_sign_mask (if_info
))
2411 if (!else_bb
&& set_b
)
2413 insn_b
= set_b
= NULL_RTX
;
2422 /* If we used a temporary, fix it up now. */
2428 noce_emit_move_insn (orig_x
, x
);
2430 set_used_flags (orig_x
);
2431 unshare_all_rtl_in_chain (seq
);
2434 emit_insn_before_setloc (seq
, BB_END (test_bb
), INSN_LOCATOR (insn_a
));
2437 /* The original THEN and ELSE blocks may now be removed. The test block
2438 must now jump to the join block. If the test block and the join block
2439 can be merged, do so. */
2442 delete_basic_block (else_bb
);
2446 remove_edge (find_edge (test_bb
, join_bb
));
2448 remove_edge (find_edge (then_bb
, join_bb
));
2449 redirect_edge_and_branch_force (single_succ_edge (test_bb
), join_bb
);
2450 delete_basic_block (then_bb
);
2453 if (can_merge_blocks_p (test_bb
, join_bb
))
2455 merge_blocks (test_bb
, join_bb
);
2459 num_updated_if_blocks
++;
2463 /* Check whether a block is suitable for conditional move conversion.
2464 Every insn must be a simple set of a register to a constant or a
2465 register. For each assignment, store the value in the array VALS,
2466 indexed by register number, then store the register number in
2467 REGS. COND is the condition we will test. */
2470 check_cond_move_block (basic_block bb
, rtx
*vals
, VEC (int, heap
) **regs
, rtx cond
)
2474 /* We can only handle simple jumps at the end of the basic block.
2475 It is almost impossible to update the CFG otherwise. */
2477 if (JUMP_P (insn
) && !onlyjump_p (insn
))
2480 FOR_BB_INSNS (bb
, insn
)
2484 if (!INSN_P (insn
) || JUMP_P (insn
))
2486 set
= single_set (insn
);
2490 dest
= SET_DEST (set
);
2491 src
= SET_SRC (set
);
2493 || (SMALL_REGISTER_CLASSES
&& HARD_REGISTER_P (dest
)))
2496 if (!CONSTANT_P (src
) && !register_operand (src
, VOIDmode
))
2499 if (side_effects_p (src
) || side_effects_p (dest
))
2502 if (may_trap_p (src
) || may_trap_p (dest
))
2505 /* Don't try to handle this if the source register was
2506 modified earlier in the block. */
2508 && vals
[REGNO (src
)] != NULL
)
2509 || (GET_CODE (src
) == SUBREG
&& REG_P (SUBREG_REG (src
))
2510 && vals
[REGNO (SUBREG_REG (src
))] != NULL
))
2513 /* Don't try to handle this if the destination register was
2514 modified earlier in the block. */
2515 if (vals
[REGNO (dest
)] != NULL
)
2518 /* Don't try to handle this if the condition uses the
2519 destination register. */
2520 if (reg_overlap_mentioned_p (dest
, cond
))
2523 /* Don't try to handle this if the source register is modified
2524 later in the block. */
2525 if (!CONSTANT_P (src
)
2526 && modified_between_p (src
, insn
, NEXT_INSN (BB_END (bb
))))
2529 vals
[REGNO (dest
)] = src
;
2531 VEC_safe_push (int, heap
, *regs
, REGNO (dest
));
2537 /* Given a basic block BB suitable for conditional move conversion,
2538 a condition COND, and arrays THEN_VALS and ELSE_VALS containing the
2539 register values depending on COND, emit the insns in the block as
2540 conditional moves. If ELSE_BLOCK is true, THEN_BB was already
2541 processed. The caller has started a sequence for the conversion.
2542 Return true if successful, false if something goes wrong. */
2545 cond_move_convert_if_block (struct noce_if_info
*if_infop
,
2546 basic_block bb
, rtx cond
,
2547 rtx
*then_vals
, rtx
*else_vals
,
2551 rtx insn
, cond_arg0
, cond_arg1
;
2553 code
= GET_CODE (cond
);
2554 cond_arg0
= XEXP (cond
, 0);
2555 cond_arg1
= XEXP (cond
, 1);
2557 FOR_BB_INSNS (bb
, insn
)
2559 rtx set
, target
, dest
, t
, e
;
2562 if (!INSN_P (insn
) || JUMP_P (insn
))
2564 set
= single_set (insn
);
2565 gcc_assert (set
&& REG_P (SET_DEST (set
)));
2567 dest
= SET_DEST (set
);
2568 regno
= REGNO (dest
);
2570 t
= then_vals
[regno
];
2571 e
= else_vals
[regno
];
2575 /* If this register was set in the then block, we already
2576 handled this case there. */
2589 target
= noce_emit_cmove (if_infop
, dest
, code
, cond_arg0
, cond_arg1
,
2595 noce_emit_move_insn (dest
, target
);
2601 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2602 it using only conditional moves. Return TRUE if we were successful at
2603 converting the block. */
2606 cond_move_process_if_block (struct noce_if_info
*if_info
)
2608 basic_block test_bb
= if_info
->test_bb
;
2609 basic_block then_bb
= if_info
->then_bb
;
2610 basic_block else_bb
= if_info
->else_bb
;
2611 basic_block join_bb
= if_info
->join_bb
;
2612 rtx jump
= if_info
->jump
;
2613 rtx cond
= if_info
->cond
;
2615 int max_reg
, size
, c
, reg
;
2618 VEC (int, heap
) *then_regs
= NULL
;
2619 VEC (int, heap
) *else_regs
= NULL
;
2622 /* Build a mapping for each block to the value used for each
2624 max_reg
= max_reg_num ();
2625 size
= (max_reg
+ 1) * sizeof (rtx
);
2626 then_vals
= (rtx
*) alloca (size
);
2627 else_vals
= (rtx
*) alloca (size
);
2628 memset (then_vals
, 0, size
);
2629 memset (else_vals
, 0, size
);
2631 /* Make sure the blocks are suitable. */
2632 if (!check_cond_move_block (then_bb
, then_vals
, &then_regs
, cond
)
2633 || (else_bb
&& !check_cond_move_block (else_bb
, else_vals
, &else_regs
, cond
)))
2635 VEC_free (int, heap
, then_regs
);
2636 VEC_free (int, heap
, else_regs
);
2640 /* Make sure the blocks can be used together. If the same register
2641 is set in both blocks, and is not set to a constant in both
2642 cases, then both blocks must set it to the same register. We
2643 have already verified that if it is set to a register, that the
2644 source register does not change after the assignment. Also count
2645 the number of registers set in only one of the blocks. */
2647 for (i
= 0; VEC_iterate (int, then_regs
, i
, reg
); i
++)
2649 if (!then_vals
[reg
] && !else_vals
[reg
])
2652 if (!else_vals
[reg
])
2656 if (!CONSTANT_P (then_vals
[reg
])
2657 && !CONSTANT_P (else_vals
[reg
])
2658 && !rtx_equal_p (then_vals
[reg
], else_vals
[reg
]))
2660 VEC_free (int, heap
, then_regs
);
2661 VEC_free (int, heap
, else_regs
);
2667 /* Finish off c for MAX_CONDITIONAL_EXECUTE. */
2668 for (i
= 0; VEC_iterate (int, else_regs
, i
, reg
); ++i
)
2669 if (!then_vals
[reg
])
2672 /* Make sure it is reasonable to convert this block. What matters
2673 is the number of assignments currently made in only one of the
2674 branches, since if we convert we are going to always execute
2676 if (c
> MAX_CONDITIONAL_EXECUTE
)
2678 VEC_free (int, heap
, then_regs
);
2679 VEC_free (int, heap
, else_regs
);
2683 /* Try to emit the conditional moves. First do the then block,
2684 then do anything left in the else blocks. */
2686 if (!cond_move_convert_if_block (if_info
, then_bb
, cond
,
2687 then_vals
, else_vals
, false)
2689 && !cond_move_convert_if_block (if_info
, else_bb
, cond
,
2690 then_vals
, else_vals
, true)))
2693 VEC_free (int, heap
, then_regs
);
2694 VEC_free (int, heap
, else_regs
);
2697 seq
= end_ifcvt_sequence (if_info
);
2700 VEC_free (int, heap
, then_regs
);
2701 VEC_free (int, heap
, else_regs
);
2705 loc_insn
= first_active_insn (then_bb
);
2708 loc_insn
= first_active_insn (else_bb
);
2709 gcc_assert (loc_insn
);
2711 emit_insn_before_setloc (seq
, jump
, INSN_LOCATOR (loc_insn
));
2715 delete_basic_block (else_bb
);
2719 remove_edge (find_edge (test_bb
, join_bb
));
2721 remove_edge (find_edge (then_bb
, join_bb
));
2722 redirect_edge_and_branch_force (single_succ_edge (test_bb
), join_bb
);
2723 delete_basic_block (then_bb
);
2726 if (can_merge_blocks_p (test_bb
, join_bb
))
2728 merge_blocks (test_bb
, join_bb
);
2732 num_updated_if_blocks
++;
2734 VEC_free (int, heap
, then_regs
);
2735 VEC_free (int, heap
, else_regs
);
2740 /* Determine if a given basic block heads a simple IF-THEN-JOIN or an
2741 IF-THEN-ELSE-JOIN block.
2743 If so, we'll try to convert the insns to not require the branch,
2744 using only transformations that do not require conditional execution.
2746 Return TRUE if we were successful at converting the block. */
2749 noce_find_if_block (basic_block test_bb
,
2750 edge then_edge
, edge else_edge
,
2753 basic_block then_bb
, else_bb
, join_bb
;
2754 bool then_else_reversed
= false;
2757 struct noce_if_info if_info
;
2759 /* We only ever should get here before reload. */
2760 gcc_assert (!reload_completed
);
2762 /* Recognize an IF-THEN-ELSE-JOIN block. */
2763 if (single_pred_p (then_edge
->dest
)
2764 && single_succ_p (then_edge
->dest
)
2765 && single_pred_p (else_edge
->dest
)
2766 && single_succ_p (else_edge
->dest
)
2767 && single_succ (then_edge
->dest
) == single_succ (else_edge
->dest
))
2769 then_bb
= then_edge
->dest
;
2770 else_bb
= else_edge
->dest
;
2771 join_bb
= single_succ (then_bb
);
2773 /* Recognize an IF-THEN-JOIN block. */
2774 else if (single_pred_p (then_edge
->dest
)
2775 && single_succ_p (then_edge
->dest
)
2776 && single_succ (then_edge
->dest
) == else_edge
->dest
)
2778 then_bb
= then_edge
->dest
;
2779 else_bb
= NULL_BLOCK
;
2780 join_bb
= else_edge
->dest
;
2782 /* Recognize an IF-ELSE-JOIN block. We can have those because the order
2783 of basic blocks in cfglayout mode does not matter, so the fallthrough
2784 edge can go to any basic block (and not just to bb->next_bb, like in
2786 else if (single_pred_p (else_edge
->dest
)
2787 && single_succ_p (else_edge
->dest
)
2788 && single_succ (else_edge
->dest
) == then_edge
->dest
)
2790 /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
2791 To make this work, we have to invert the THEN and ELSE blocks
2792 and reverse the jump condition. */
2793 then_bb
= else_edge
->dest
;
2794 else_bb
= NULL_BLOCK
;
2795 join_bb
= single_succ (then_bb
);
2796 then_else_reversed
= true;
2799 /* Not a form we can handle. */
2802 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
2803 if (single_succ_edge (then_bb
)->flags
& EDGE_COMPLEX
)
2806 && single_succ_edge (else_bb
)->flags
& EDGE_COMPLEX
)
2809 num_possible_if_blocks
++;
2814 "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
2815 (else_bb
) ? "-ELSE" : "",
2816 pass
, test_bb
->index
, then_bb
->index
);
2819 fprintf (dump_file
, ", else %d", else_bb
->index
);
2821 fprintf (dump_file
, ", join %d\n", join_bb
->index
);
2824 /* If the conditional jump is more than just a conditional
2825 jump, then we can not do if-conversion on this block. */
2826 jump
= BB_END (test_bb
);
2827 if (! onlyjump_p (jump
))
2830 /* If this is not a standard conditional jump, we can't parse it. */
2831 cond
= noce_get_condition (jump
,
2833 then_else_reversed
);
2837 /* We must be comparing objects whose modes imply the size. */
2838 if (GET_MODE (XEXP (cond
, 0)) == BLKmode
)
2841 /* Initialize an IF_INFO struct to pass around. */
2842 memset (&if_info
, 0, sizeof if_info
);
2843 if_info
.test_bb
= test_bb
;
2844 if_info
.then_bb
= then_bb
;
2845 if_info
.else_bb
= else_bb
;
2846 if_info
.join_bb
= join_bb
;
2847 if_info
.cond
= cond
;
2848 if_info
.cond_earliest
= cond_earliest
;
2849 if_info
.jump
= jump
;
2850 if_info
.then_else_reversed
= then_else_reversed
;
2851 if_info
.branch_cost
= BRANCH_COST (optimize_bb_for_speed_p (test_bb
),
2852 predictable_edge_p (then_edge
));
2854 /* Do the real work. */
2856 if (noce_process_if_block (&if_info
))
2859 if (HAVE_conditional_move
2860 && cond_move_process_if_block (&if_info
))
2867 /* Merge the blocks and mark for local life update. */
2870 merge_if_block (struct ce_if_block
* ce_info
)
2872 basic_block test_bb
= ce_info
->test_bb
; /* last test block */
2873 basic_block then_bb
= ce_info
->then_bb
; /* THEN */
2874 basic_block else_bb
= ce_info
->else_bb
; /* ELSE or NULL */
2875 basic_block join_bb
= ce_info
->join_bb
; /* join block */
2876 basic_block combo_bb
;
2878 /* All block merging is done into the lower block numbers. */
2881 df_set_bb_dirty (test_bb
);
2883 /* Merge any basic blocks to handle && and || subtests. Each of
2884 the blocks are on the fallthru path from the predecessor block. */
2885 if (ce_info
->num_multiple_test_blocks
> 0)
2887 basic_block bb
= test_bb
;
2888 basic_block last_test_bb
= ce_info
->last_test_bb
;
2889 basic_block fallthru
= block_fallthru (bb
);
2894 fallthru
= block_fallthru (bb
);
2895 merge_blocks (combo_bb
, bb
);
2898 while (bb
!= last_test_bb
);
2901 /* Merge TEST block into THEN block. Normally the THEN block won't have a
2902 label, but it might if there were || tests. That label's count should be
2903 zero, and it normally should be removed. */
2907 merge_blocks (combo_bb
, then_bb
);
2911 /* The ELSE block, if it existed, had a label. That label count
2912 will almost always be zero, but odd things can happen when labels
2913 get their addresses taken. */
2916 merge_blocks (combo_bb
, else_bb
);
2920 /* If there was no join block reported, that means it was not adjacent
2921 to the others, and so we cannot merge them. */
2925 rtx last
= BB_END (combo_bb
);
2927 /* The outgoing edge for the current COMBO block should already
2928 be correct. Verify this. */
2929 if (EDGE_COUNT (combo_bb
->succs
) == 0)
2930 gcc_assert (find_reg_note (last
, REG_NORETURN
, NULL
)
2931 || (NONJUMP_INSN_P (last
)
2932 && GET_CODE (PATTERN (last
)) == TRAP_IF
2933 && (TRAP_CONDITION (PATTERN (last
))
2934 == const_true_rtx
)));
2937 /* There should still be something at the end of the THEN or ELSE
2938 blocks taking us to our final destination. */
2939 gcc_assert (JUMP_P (last
)
2940 || (EDGE_SUCC (combo_bb
, 0)->dest
== EXIT_BLOCK_PTR
2942 && SIBLING_CALL_P (last
))
2943 || ((EDGE_SUCC (combo_bb
, 0)->flags
& EDGE_EH
)
2944 && can_throw_internal (last
)));
2947 /* The JOIN block may have had quite a number of other predecessors too.
2948 Since we've already merged the TEST, THEN and ELSE blocks, we should
2949 have only one remaining edge from our if-then-else diamond. If there
2950 is more than one remaining edge, it must come from elsewhere. There
2951 may be zero incoming edges if the THEN block didn't actually join
2952 back up (as with a call to a non-return function). */
2953 else if (EDGE_COUNT (join_bb
->preds
) < 2
2954 && join_bb
!= EXIT_BLOCK_PTR
)
2956 /* We can merge the JOIN cleanly and update the dataflow try
2957 again on this pass.*/
2958 merge_blocks (combo_bb
, join_bb
);
2963 /* We cannot merge the JOIN. */
2965 /* The outgoing edge for the current COMBO block should already
2966 be correct. Verify this. */
2967 gcc_assert (single_succ_p (combo_bb
)
2968 && single_succ (combo_bb
) == join_bb
);
2970 /* Remove the jump and cruft from the end of the COMBO block. */
2971 if (join_bb
!= EXIT_BLOCK_PTR
)
2972 tidy_fallthru_edge (single_succ_edge (combo_bb
));
2975 num_updated_if_blocks
++;
2978 /* Find a block ending in a simple IF condition and try to transform it
2979 in some way. When converting a multi-block condition, put the new code
2980 in the first such block and delete the rest. Return a pointer to this
2981 first block if some transformation was done. Return NULL otherwise. */
2984 find_if_header (basic_block test_bb
, int pass
)
2986 ce_if_block_t ce_info
;
2990 /* The kind of block we're looking for has exactly two successors. */
2991 if (EDGE_COUNT (test_bb
->succs
) != 2)
2994 then_edge
= EDGE_SUCC (test_bb
, 0);
2995 else_edge
= EDGE_SUCC (test_bb
, 1);
2997 if (df_get_bb_dirty (then_edge
->dest
))
2999 if (df_get_bb_dirty (else_edge
->dest
))
3002 /* Neither edge should be abnormal. */
3003 if ((then_edge
->flags
& EDGE_COMPLEX
)
3004 || (else_edge
->flags
& EDGE_COMPLEX
))
3007 /* Nor exit the loop. */
3008 if ((then_edge
->flags
& EDGE_LOOP_EXIT
)
3009 || (else_edge
->flags
& EDGE_LOOP_EXIT
))
3012 /* The THEN edge is canonically the one that falls through. */
3013 if (then_edge
->flags
& EDGE_FALLTHRU
)
3015 else if (else_edge
->flags
& EDGE_FALLTHRU
)
3018 else_edge
= then_edge
;
3022 /* Otherwise this must be a multiway branch of some sort. */
3025 memset (&ce_info
, '\0', sizeof (ce_info
));
3026 ce_info
.test_bb
= test_bb
;
3027 ce_info
.then_bb
= then_edge
->dest
;
3028 ce_info
.else_bb
= else_edge
->dest
;
3029 ce_info
.pass
= pass
;
3031 #ifdef IFCVT_INIT_EXTRA_FIELDS
3032 IFCVT_INIT_EXTRA_FIELDS (&ce_info
);
3035 if (! reload_completed
3036 && noce_find_if_block (test_bb
, then_edge
, else_edge
, pass
))
3039 if (HAVE_conditional_execution
&& reload_completed
3040 && cond_exec_find_if_block (&ce_info
))
3044 && optab_handler (ctrap_optab
, word_mode
)->insn_code
!= CODE_FOR_nothing
3045 && find_cond_trap (test_bb
, then_edge
, else_edge
))
3048 if (dom_info_state (CDI_POST_DOMINATORS
) >= DOM_NO_FAST_QUERY
3049 && (! HAVE_conditional_execution
|| reload_completed
))
3051 if (find_if_case_1 (test_bb
, then_edge
, else_edge
))
3053 if (find_if_case_2 (test_bb
, then_edge
, else_edge
))
3061 fprintf (dump_file
, "Conversion succeeded on pass %d.\n", pass
);
3062 /* Set this so we continue looking. */
3063 cond_exec_changed_p
= TRUE
;
3064 return ce_info
.test_bb
;
3067 /* Return true if a block has two edges, one of which falls through to the next
3068 block, and the other jumps to a specific block, so that we can tell if the
3069 block is part of an && test or an || test. Returns either -1 or the number
3070 of non-note, non-jump, non-USE/CLOBBER insns in the block. */
3073 block_jumps_and_fallthru_p (basic_block cur_bb
, basic_block target_bb
)
3076 int fallthru_p
= FALSE
;
3083 if (!cur_bb
|| !target_bb
)
3086 /* If no edges, obviously it doesn't jump or fallthru. */
3087 if (EDGE_COUNT (cur_bb
->succs
) == 0)
3090 FOR_EACH_EDGE (cur_edge
, ei
, cur_bb
->succs
)
3092 if (cur_edge
->flags
& EDGE_COMPLEX
)
3093 /* Anything complex isn't what we want. */
3096 else if (cur_edge
->flags
& EDGE_FALLTHRU
)
3099 else if (cur_edge
->dest
== target_bb
)
3106 if ((jump_p
& fallthru_p
) == 0)
3109 /* Don't allow calls in the block, since this is used to group && and ||
3110 together for conditional execution support. ??? we should support
3111 conditional execution support across calls for IA-64 some day, but
3112 for now it makes the code simpler. */
3113 end
= BB_END (cur_bb
);
3114 insn
= BB_HEAD (cur_bb
);
3116 while (insn
!= NULL_RTX
)
3123 && GET_CODE (PATTERN (insn
)) != USE
3124 && GET_CODE (PATTERN (insn
)) != CLOBBER
)
3130 insn
= NEXT_INSN (insn
);
3136 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
3137 block. If so, we'll try to convert the insns to not require the branch.
3138 Return TRUE if we were successful at converting the block. */
3141 cond_exec_find_if_block (struct ce_if_block
* ce_info
)
3143 basic_block test_bb
= ce_info
->test_bb
;
3144 basic_block then_bb
= ce_info
->then_bb
;
3145 basic_block else_bb
= ce_info
->else_bb
;
3146 basic_block join_bb
= NULL_BLOCK
;
3151 ce_info
->last_test_bb
= test_bb
;
3153 /* We only ever should get here after reload,
3154 and only if we have conditional execution. */
3155 gcc_assert (HAVE_conditional_execution
&& reload_completed
);
3157 /* Discover if any fall through predecessors of the current test basic block
3158 were && tests (which jump to the else block) or || tests (which jump to
3160 if (single_pred_p (test_bb
)
3161 && single_pred_edge (test_bb
)->flags
== EDGE_FALLTHRU
)
3163 basic_block bb
= single_pred (test_bb
);
3164 basic_block target_bb
;
3165 int max_insns
= MAX_CONDITIONAL_EXECUTE
;
3168 /* Determine if the preceding block is an && or || block. */
3169 if ((n_insns
= block_jumps_and_fallthru_p (bb
, else_bb
)) >= 0)
3171 ce_info
->and_and_p
= TRUE
;
3172 target_bb
= else_bb
;
3174 else if ((n_insns
= block_jumps_and_fallthru_p (bb
, then_bb
)) >= 0)
3176 ce_info
->and_and_p
= FALSE
;
3177 target_bb
= then_bb
;
3180 target_bb
= NULL_BLOCK
;
3182 if (target_bb
&& n_insns
<= max_insns
)
3184 int total_insns
= 0;
3187 ce_info
->last_test_bb
= test_bb
;
3189 /* Found at least one && or || block, look for more. */
3192 ce_info
->test_bb
= test_bb
= bb
;
3193 total_insns
+= n_insns
;
3196 if (!single_pred_p (bb
))
3199 bb
= single_pred (bb
);
3200 n_insns
= block_jumps_and_fallthru_p (bb
, target_bb
);
3202 while (n_insns
>= 0 && (total_insns
+ n_insns
) <= max_insns
);
3204 ce_info
->num_multiple_test_blocks
= blocks
;
3205 ce_info
->num_multiple_test_insns
= total_insns
;
3207 if (ce_info
->and_and_p
)
3208 ce_info
->num_and_and_blocks
= blocks
;
3210 ce_info
->num_or_or_blocks
= blocks
;
3214 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
3215 other than any || blocks which jump to the THEN block. */
3216 if ((EDGE_COUNT (then_bb
->preds
) - ce_info
->num_or_or_blocks
) != 1)
3219 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
3220 FOR_EACH_EDGE (cur_edge
, ei
, then_bb
->preds
)
3222 if (cur_edge
->flags
& EDGE_COMPLEX
)
3226 FOR_EACH_EDGE (cur_edge
, ei
, else_bb
->preds
)
3228 if (cur_edge
->flags
& EDGE_COMPLEX
)
3232 /* The THEN block of an IF-THEN combo must have zero or one successors. */
3233 if (EDGE_COUNT (then_bb
->succs
) > 0
3234 && (!single_succ_p (then_bb
)
3235 || (single_succ_edge (then_bb
)->flags
& EDGE_COMPLEX
)
3236 || (epilogue_completed
&& tablejump_p (BB_END (then_bb
), NULL
, NULL
))))
3239 /* If the THEN block has no successors, conditional execution can still
3240 make a conditional call. Don't do this unless the ELSE block has
3241 only one incoming edge -- the CFG manipulation is too ugly otherwise.
3242 Check for the last insn of the THEN block being an indirect jump, which
3243 is listed as not having any successors, but confuses the rest of the CE
3244 code processing. ??? we should fix this in the future. */
3245 if (EDGE_COUNT (then_bb
->succs
) == 0)
3247 if (single_pred_p (else_bb
))
3249 rtx last_insn
= BB_END (then_bb
);
3252 && NOTE_P (last_insn
)
3253 && last_insn
!= BB_HEAD (then_bb
))
3254 last_insn
= PREV_INSN (last_insn
);
3257 && JUMP_P (last_insn
)
3258 && ! simplejump_p (last_insn
))
3262 else_bb
= NULL_BLOCK
;
3268 /* If the THEN block's successor is the other edge out of the TEST block,
3269 then we have an IF-THEN combo without an ELSE. */
3270 else if (single_succ (then_bb
) == else_bb
)
3273 else_bb
= NULL_BLOCK
;
3276 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
3277 has exactly one predecessor and one successor, and the outgoing edge
3278 is not complex, then we have an IF-THEN-ELSE combo. */
3279 else if (single_succ_p (else_bb
)
3280 && single_succ (then_bb
) == single_succ (else_bb
)
3281 && single_pred_p (else_bb
)
3282 && ! (single_succ_edge (else_bb
)->flags
& EDGE_COMPLEX
)
3283 && ! (epilogue_completed
&& tablejump_p (BB_END (else_bb
), NULL
, NULL
)))
3284 join_bb
= single_succ (else_bb
);
3286 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
3290 num_possible_if_blocks
++;
3295 "\nIF-THEN%s block found, pass %d, start block %d "
3296 "[insn %d], then %d [%d]",
3297 (else_bb
) ? "-ELSE" : "",
3300 BB_HEAD (test_bb
) ? (int)INSN_UID (BB_HEAD (test_bb
)) : -1,
3302 BB_HEAD (then_bb
) ? (int)INSN_UID (BB_HEAD (then_bb
)) : -1);
3305 fprintf (dump_file
, ", else %d [%d]",
3307 BB_HEAD (else_bb
) ? (int)INSN_UID (BB_HEAD (else_bb
)) : -1);
3309 fprintf (dump_file
, ", join %d [%d]",
3311 BB_HEAD (join_bb
) ? (int)INSN_UID (BB_HEAD (join_bb
)) : -1);
3313 if (ce_info
->num_multiple_test_blocks
> 0)
3314 fprintf (dump_file
, ", %d %s block%s last test %d [%d]",
3315 ce_info
->num_multiple_test_blocks
,
3316 (ce_info
->and_and_p
) ? "&&" : "||",
3317 (ce_info
->num_multiple_test_blocks
== 1) ? "" : "s",
3318 ce_info
->last_test_bb
->index
,
3319 ((BB_HEAD (ce_info
->last_test_bb
))
3320 ? (int)INSN_UID (BB_HEAD (ce_info
->last_test_bb
))
3323 fputc ('\n', dump_file
);
3326 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we get the
3327 first condition for free, since we've already asserted that there's a
3328 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
3329 we checked the FALLTHRU flag, those are already adjacent to the last IF
3331 /* ??? As an enhancement, move the ELSE block. Have to deal with
3332 BLOCK notes, if by no other means than backing out the merge if they
3333 exist. Sticky enough I don't want to think about it now. */
3335 if (else_bb
&& (next
= next
->next_bb
) != else_bb
)
3337 if ((next
= next
->next_bb
) != join_bb
&& join_bb
!= EXIT_BLOCK_PTR
)
3345 /* Do the real work. */
3347 ce_info
->else_bb
= else_bb
;
3348 ce_info
->join_bb
= join_bb
;
3350 /* If we have && and || tests, try to first handle combining the && and ||
3351 tests into the conditional code, and if that fails, go back and handle
3352 it without the && and ||, which at present handles the && case if there
3353 was no ELSE block. */
3354 if (cond_exec_process_if_block (ce_info
, TRUE
))
3357 if (ce_info
->num_multiple_test_blocks
)
3361 if (cond_exec_process_if_block (ce_info
, FALSE
))
3368 /* Convert a branch over a trap, or a branch
3369 to a trap, into a conditional trap. */
3372 find_cond_trap (basic_block test_bb
, edge then_edge
, edge else_edge
)
3374 basic_block then_bb
= then_edge
->dest
;
3375 basic_block else_bb
= else_edge
->dest
;
3376 basic_block other_bb
, trap_bb
;
3377 rtx trap
, jump
, cond
, cond_earliest
, seq
;
3380 /* Locate the block with the trap instruction. */
3381 /* ??? While we look for no successors, we really ought to allow
3382 EH successors. Need to fix merge_if_block for that to work. */
3383 if ((trap
= block_has_only_trap (then_bb
)) != NULL
)
3384 trap_bb
= then_bb
, other_bb
= else_bb
;
3385 else if ((trap
= block_has_only_trap (else_bb
)) != NULL
)
3386 trap_bb
= else_bb
, other_bb
= then_bb
;
3392 fprintf (dump_file
, "\nTRAP-IF block found, start %d, trap %d\n",
3393 test_bb
->index
, trap_bb
->index
);
3396 /* If this is not a standard conditional jump, we can't parse it. */
3397 jump
= BB_END (test_bb
);
3398 cond
= noce_get_condition (jump
, &cond_earliest
, false);
3402 /* If the conditional jump is more than just a conditional jump, then
3403 we can not do if-conversion on this block. */
3404 if (! onlyjump_p (jump
))
3407 /* We must be comparing objects whose modes imply the size. */
3408 if (GET_MODE (XEXP (cond
, 0)) == BLKmode
)
3411 /* Reverse the comparison code, if necessary. */
3412 code
= GET_CODE (cond
);
3413 if (then_bb
== trap_bb
)
3415 code
= reversed_comparison_code (cond
, jump
);
3416 if (code
== UNKNOWN
)
3420 /* Attempt to generate the conditional trap. */
3421 seq
= gen_cond_trap (code
, copy_rtx (XEXP (cond
, 0)),
3422 copy_rtx (XEXP (cond
, 1)),
3423 TRAP_CODE (PATTERN (trap
)));
3427 /* Emit the new insns before cond_earliest. */
3428 emit_insn_before_setloc (seq
, cond_earliest
, INSN_LOCATOR (trap
));
3430 /* Delete the trap block if possible. */
3431 remove_edge (trap_bb
== then_bb
? then_edge
: else_edge
);
3432 df_set_bb_dirty (test_bb
);
3433 df_set_bb_dirty (then_bb
);
3434 df_set_bb_dirty (else_bb
);
3436 if (EDGE_COUNT (trap_bb
->preds
) == 0)
3438 delete_basic_block (trap_bb
);
3442 /* Wire together the blocks again. */
3443 if (current_ir_type () == IR_RTL_CFGLAYOUT
)
3444 single_succ_edge (test_bb
)->flags
|= EDGE_FALLTHRU
;
3449 lab
= JUMP_LABEL (jump
);
3450 newjump
= emit_jump_insn_after (gen_jump (lab
), jump
);
3451 LABEL_NUSES (lab
) += 1;
3452 JUMP_LABEL (newjump
) = lab
;
3453 emit_barrier_after (newjump
);
3457 if (can_merge_blocks_p (test_bb
, other_bb
))
3459 merge_blocks (test_bb
, other_bb
);
3463 num_updated_if_blocks
++;
3467 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
3471 block_has_only_trap (basic_block bb
)
3475 /* We're not the exit block. */
3476 if (bb
== EXIT_BLOCK_PTR
)
3479 /* The block must have no successors. */
3480 if (EDGE_COUNT (bb
->succs
) > 0)
3483 /* The only instruction in the THEN block must be the trap. */
3484 trap
= first_active_insn (bb
);
3485 if (! (trap
== BB_END (bb
)
3486 && GET_CODE (PATTERN (trap
)) == TRAP_IF
3487 && TRAP_CONDITION (PATTERN (trap
)) == const_true_rtx
))
3493 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
3494 transformable, but not necessarily the other. There need be no
3497 Return TRUE if we were successful at converting the block.
3499 Cases we'd like to look at:
3502 if (test) goto over; // x not live
3510 if (! test) goto label;
3513 if (test) goto E; // x not live
3527 (3) // This one's really only interesting for targets that can do
3528 // multiway branching, e.g. IA-64 BBB bundles. For other targets
3529 // it results in multiple branches on a cache line, which often
3530 // does not sit well with predictors.
3532 if (test1) goto E; // predicted not taken
3548 (A) Don't do (2) if the branch is predicted against the block we're
3549 eliminating. Do it anyway if we can eliminate a branch; this requires
3550 that the sole successor of the eliminated block postdominate the other
3553 (B) With CE, on (3) we can steal from both sides of the if, creating
3562 Again, this is most useful if J postdominates.
3564 (C) CE substitutes for helpful life information.
3566 (D) These heuristics need a lot of work. */
3568 /* Tests for case 1 above. */
3571 find_if_case_1 (basic_block test_bb
, edge then_edge
, edge else_edge
)
3573 basic_block then_bb
= then_edge
->dest
;
3574 basic_block else_bb
= else_edge
->dest
;
3578 /* If we are partitioning hot/cold basic blocks, we don't want to
3579 mess up unconditional or indirect jumps that cross between hot
3582 Basic block partitioning may result in some jumps that appear to
3583 be optimizable (or blocks that appear to be mergeable), but which really
3584 must be left untouched (they are required to make it safely across
3585 partition boundaries). See the comments at the top of
3586 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
3588 if ((BB_END (then_bb
)
3589 && find_reg_note (BB_END (then_bb
), REG_CROSSING_JUMP
, NULL_RTX
))
3590 || (BB_END (test_bb
)
3591 && find_reg_note (BB_END (test_bb
), REG_CROSSING_JUMP
, NULL_RTX
))
3592 || (BB_END (else_bb
)
3593 && find_reg_note (BB_END (else_bb
), REG_CROSSING_JUMP
,
3597 /* THEN has one successor. */
3598 if (!single_succ_p (then_bb
))
3601 /* THEN does not fall through, but is not strange either. */
3602 if (single_succ_edge (then_bb
)->flags
& (EDGE_COMPLEX
| EDGE_FALLTHRU
))
3605 /* THEN has one predecessor. */
3606 if (!single_pred_p (then_bb
))
3609 /* THEN must do something. */
3610 if (forwarder_block_p (then_bb
))
3613 num_possible_if_blocks
++;
3616 "\nIF-CASE-1 found, start %d, then %d\n",
3617 test_bb
->index
, then_bb
->index
);
3619 /* THEN is small. */
3620 if (! cheap_bb_rtx_cost_p (then_bb
,
3621 COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge
->src
),
3622 predictable_edge_p (then_edge
)))))
3625 /* Registers set are dead, or are predicable. */
3626 if (! dead_or_predicable (test_bb
, then_bb
, else_bb
,
3627 single_succ (then_bb
), 1))
3630 /* Conversion went ok, including moving the insns and fixing up the
3631 jump. Adjust the CFG to match. */
3633 /* We can avoid creating a new basic block if then_bb is immediately
3634 followed by else_bb, i.e. deleting then_bb allows test_bb to fall
3637 if (then_bb
->next_bb
== else_bb
3638 && then_bb
->prev_bb
== test_bb
3639 && else_bb
!= EXIT_BLOCK_PTR
)
3641 redirect_edge_succ (FALLTHRU_EDGE (test_bb
), else_bb
);
3645 new_bb
= redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb
),
3648 df_set_bb_dirty (test_bb
);
3649 df_set_bb_dirty (else_bb
);
3651 then_bb_index
= then_bb
->index
;
3652 delete_basic_block (then_bb
);
3654 /* Make rest of code believe that the newly created block is the THEN_BB
3655 block we removed. */
3658 df_bb_replace (then_bb_index
, new_bb
);
3659 /* Since the fallthru edge was redirected from test_bb to new_bb,
3660 we need to ensure that new_bb is in the same partition as
3661 test bb (you can not fall through across section boundaries). */
3662 BB_COPY_PARTITION (new_bb
, test_bb
);
3666 num_updated_if_blocks
++;
3671 /* Test for case 2 above. */
3674 find_if_case_2 (basic_block test_bb
, edge then_edge
, edge else_edge
)
3676 basic_block then_bb
= then_edge
->dest
;
3677 basic_block else_bb
= else_edge
->dest
;
3681 /* If we are partitioning hot/cold basic blocks, we don't want to
3682 mess up unconditional or indirect jumps that cross between hot
3685 Basic block partitioning may result in some jumps that appear to
3686 be optimizable (or blocks that appear to be mergeable), but which really
3687 must be left untouched (they are required to make it safely across
3688 partition boundaries). See the comments at the top of
3689 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
3691 if ((BB_END (then_bb
)
3692 && find_reg_note (BB_END (then_bb
), REG_CROSSING_JUMP
, NULL_RTX
))
3693 || (BB_END (test_bb
)
3694 && find_reg_note (BB_END (test_bb
), REG_CROSSING_JUMP
, NULL_RTX
))
3695 || (BB_END (else_bb
)
3696 && find_reg_note (BB_END (else_bb
), REG_CROSSING_JUMP
,
3700 /* ELSE has one successor. */
3701 if (!single_succ_p (else_bb
))
3704 else_succ
= single_succ_edge (else_bb
);
3706 /* ELSE outgoing edge is not complex. */
3707 if (else_succ
->flags
& EDGE_COMPLEX
)
3710 /* ELSE has one predecessor. */
3711 if (!single_pred_p (else_bb
))
3714 /* THEN is not EXIT. */
3715 if (then_bb
->index
< NUM_FIXED_BLOCKS
)
3718 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
3719 note
= find_reg_note (BB_END (test_bb
), REG_BR_PROB
, NULL_RTX
);
3720 if (note
&& INTVAL (XEXP (note
, 0)) >= REG_BR_PROB_BASE
/ 2)
3722 else if (else_succ
->dest
->index
< NUM_FIXED_BLOCKS
3723 || dominated_by_p (CDI_POST_DOMINATORS
, then_bb
,
3729 num_possible_if_blocks
++;
3732 "\nIF-CASE-2 found, start %d, else %d\n",
3733 test_bb
->index
, else_bb
->index
);
3735 /* ELSE is small. */
3736 if (! cheap_bb_rtx_cost_p (else_bb
,
3737 COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge
->src
),
3738 predictable_edge_p (else_edge
)))))
3741 /* Registers set are dead, or are predicable. */
3742 if (! dead_or_predicable (test_bb
, else_bb
, then_bb
, else_succ
->dest
, 0))
3745 /* Conversion went ok, including moving the insns and fixing up the
3746 jump. Adjust the CFG to match. */
3748 df_set_bb_dirty (test_bb
);
3749 df_set_bb_dirty (then_bb
);
3750 delete_basic_block (else_bb
);
3753 num_updated_if_blocks
++;
3755 /* ??? We may now fallthru from one of THEN's successors into a join
3756 block. Rerun cleanup_cfg? Examine things manually? Wait? */
3761 /* A subroutine of dead_or_predicable called through for_each_rtx.
3762 Return 1 if a memory is found. */
3765 find_memory (rtx
*px
, void *data ATTRIBUTE_UNUSED
)
3770 /* Used by the code above to perform the actual rtl transformations.
3771 Return TRUE if successful.
3773 TEST_BB is the block containing the conditional branch. MERGE_BB
3774 is the block containing the code to manipulate. NEW_DEST is the
3775 label TEST_BB should be branching to after the conversion.
3776 REVERSEP is true if the sense of the branch should be reversed. */
3779 dead_or_predicable (basic_block test_bb
, basic_block merge_bb
,
3780 basic_block other_bb
, basic_block new_dest
, int reversep
)
3782 rtx head
, end
, jump
, earliest
= NULL_RTX
, old_dest
, new_label
= NULL_RTX
;
3783 /* Number of pending changes. */
3784 int n_validated_changes
= 0;
3786 jump
= BB_END (test_bb
);
3788 /* Find the extent of the real code in the merge block. */
3789 head
= BB_HEAD (merge_bb
);
3790 end
= BB_END (merge_bb
);
3792 /* If merge_bb ends with a tablejump, predicating/moving insn's
3793 into test_bb and then deleting merge_bb will result in the jumptable
3794 that follows merge_bb being removed along with merge_bb and then we
3795 get an unresolved reference to the jumptable. */
3796 if (tablejump_p (end
, NULL
, NULL
))
3800 head
= NEXT_INSN (head
);
3805 head
= end
= NULL_RTX
;
3808 head
= NEXT_INSN (head
);
3815 head
= end
= NULL_RTX
;
3818 end
= PREV_INSN (end
);
3821 /* Disable handling dead code by conditional execution if the machine needs
3822 to do anything funny with the tests, etc. */
3823 #ifndef IFCVT_MODIFY_TESTS
3824 if (HAVE_conditional_execution
)
3826 /* In the conditional execution case, we have things easy. We know
3827 the condition is reversible. We don't have to check life info
3828 because we're going to conditionally execute the code anyway.
3829 All that's left is making sure the insns involved can actually
3834 cond
= cond_exec_get_condition (jump
);
3838 prob_val
= find_reg_note (jump
, REG_BR_PROB
, NULL_RTX
);
3840 prob_val
= XEXP (prob_val
, 0);
3844 enum rtx_code rev
= reversed_comparison_code (cond
, jump
);
3847 cond
= gen_rtx_fmt_ee (rev
, GET_MODE (cond
), XEXP (cond
, 0),
3850 prob_val
= GEN_INT (REG_BR_PROB_BASE
- INTVAL (prob_val
));
3853 if (cond_exec_process_insns (NULL
, head
, end
, cond
, prob_val
, 0)
3854 && verify_changes (0))
3855 n_validated_changes
= num_validated_changes ();
3862 /* Try the NCE path if the CE path did not result in any changes. */
3863 if (n_validated_changes
== 0)
3865 /* In the non-conditional execution case, we have to verify that there
3866 are no trapping operations, no calls, no references to memory, and
3867 that any registers modified are dead at the branch site. */
3869 rtx insn
, cond
, prev
;
3870 bitmap merge_set
, test_live
, test_set
;
3871 unsigned i
, fail
= 0;
3874 /* Check for no calls or trapping operations. */
3875 for (insn
= head
; ; insn
= NEXT_INSN (insn
))
3881 if (may_trap_p (PATTERN (insn
)))
3884 /* ??? Even non-trapping memories such as stack frame
3885 references must be avoided. For stores, we collect
3886 no lifetime info; for reads, we'd have to assert
3887 true_dependence false against every store in the
3889 if (for_each_rtx (&PATTERN (insn
), find_memory
, NULL
))
3896 if (! any_condjump_p (jump
))
3899 /* Find the extent of the conditional. */
3900 cond
= noce_get_condition (jump
, &earliest
, false);
3905 MERGE_SET = set of registers set in MERGE_BB
3906 TEST_LIVE = set of registers live at EARLIEST
3907 TEST_SET = set of registers set between EARLIEST and the
3908 end of the block. */
3910 merge_set
= BITMAP_ALLOC (®_obstack
);
3911 test_live
= BITMAP_ALLOC (®_obstack
);
3912 test_set
= BITMAP_ALLOC (®_obstack
);
3914 /* ??? bb->local_set is only valid during calculate_global_regs_live,
3915 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
3916 since we've already asserted that MERGE_BB is small. */
3917 /* If we allocated new pseudos (e.g. in the conditional move
3918 expander called from noce_emit_cmove), we must resize the
3920 if (max_regno
< max_reg_num ())
3921 max_regno
= max_reg_num ();
3923 FOR_BB_INSNS (merge_bb
, insn
)
3927 unsigned int uid
= INSN_UID (insn
);
3929 for (def_rec
= DF_INSN_UID_DEFS (uid
); *def_rec
; def_rec
++)
3931 df_ref def
= *def_rec
;
3932 bitmap_set_bit (merge_set
, DF_REF_REGNO (def
));
3937 /* For small register class machines, don't lengthen lifetimes of
3938 hard registers before reload. */
3939 if (SMALL_REGISTER_CLASSES
&& ! reload_completed
)
3941 EXECUTE_IF_SET_IN_BITMAP (merge_set
, 0, i
, bi
)
3943 if (i
< FIRST_PSEUDO_REGISTER
3945 && ! global_regs
[i
])
3950 /* For TEST, we're interested in a range of insns, not a whole block.
3951 Moreover, we're interested in the insns live from OTHER_BB. */
3953 /* The loop below takes the set of live registers
3954 after JUMP, and calculates the live set before EARLIEST. */
3955 bitmap_copy (test_live
, df_get_live_in (other_bb
));
3956 df_simulate_initialize_backwards (test_bb
, test_live
);
3957 for (insn
= jump
; ; insn
= prev
)
3961 df_simulate_find_defs (insn
, test_set
);
3962 df_simulate_one_insn_backwards (test_bb
, insn
, test_live
);
3964 prev
= PREV_INSN (insn
);
3965 if (insn
== earliest
)
3969 /* We can perform the transformation if
3970 MERGE_SET & (TEST_SET | TEST_LIVE)
3972 TEST_SET & DF_LIVE_IN (merge_bb)
3975 if (bitmap_intersect_p (test_set
, merge_set
)
3976 || bitmap_intersect_p (test_live
, merge_set
)
3977 || bitmap_intersect_p (test_set
, df_get_live_in (merge_bb
)))
3980 BITMAP_FREE (merge_set
);
3981 BITMAP_FREE (test_live
);
3982 BITMAP_FREE (test_set
);
3989 /* We don't want to use normal invert_jump or redirect_jump because
3990 we don't want to delete_insn called. Also, we want to do our own
3991 change group management. */
3993 old_dest
= JUMP_LABEL (jump
);
3994 if (other_bb
!= new_dest
)
3996 new_label
= block_label (new_dest
);
3998 ? ! invert_jump_1 (jump
, new_label
)
3999 : ! redirect_jump_1 (jump
, new_label
))
4003 if (verify_changes (n_validated_changes
))
4004 confirm_change_group ();
4008 if (other_bb
!= new_dest
)
4010 redirect_jump_2 (jump
, old_dest
, new_label
, 0, reversep
);
4012 redirect_edge_succ (BRANCH_EDGE (test_bb
), new_dest
);
4015 gcov_type count
, probability
;
4016 count
= BRANCH_EDGE (test_bb
)->count
;
4017 BRANCH_EDGE (test_bb
)->count
= FALLTHRU_EDGE (test_bb
)->count
;
4018 FALLTHRU_EDGE (test_bb
)->count
= count
;
4019 probability
= BRANCH_EDGE (test_bb
)->probability
;
4020 BRANCH_EDGE (test_bb
)->probability
4021 = FALLTHRU_EDGE (test_bb
)->probability
;
4022 FALLTHRU_EDGE (test_bb
)->probability
= probability
;
4023 update_br_prob_note (test_bb
);
4027 /* Move the insns out of MERGE_BB to before the branch. */
4032 if (end
== BB_END (merge_bb
))
4033 BB_END (merge_bb
) = PREV_INSN (head
);
4035 /* PR 21767: When moving insns above a conditional branch, REG_EQUAL
4036 notes might become invalid. */
4042 if (! INSN_P (insn
))
4044 note
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
);
4047 set
= single_set (insn
);
4048 if (!set
|| !function_invariant_p (SET_SRC (set
)))
4049 remove_note (insn
, note
);
4050 } while (insn
!= end
&& (insn
= NEXT_INSN (insn
)));
4052 reorder_insns (head
, end
, PREV_INSN (earliest
));
4055 /* Remove the jump and edge if we can. */
4056 if (other_bb
== new_dest
)
4059 remove_edge (BRANCH_EDGE (test_bb
));
4060 /* ??? Can't merge blocks here, as then_bb is still in use.
4061 At minimum, the merge will get done just before bb-reorder. */
4071 /* Main entry point for all if-conversion. */
/* NOTE(review): this extraction is lossy -- the function header for
   if_convert and several interior lines (braces, the pass/bb loop
   headers, some fprintf calls) are missing; the comments below
   annotate only the statements that are visible.  */
/* Register the DF_LIVE dataflow problem and mark every block dirty so
   liveness is recomputed as blocks are rewritten.  */
4081 df_live_add_problem ();
4082 df_live_set_all_dirty ();
/* Reset the per-invocation statistics counters.  */
4085 num_possible_if_blocks
= 0;
4086 num_updated_if_blocks
= 0;
4087 num_true_changes
= 0;
/* Build loop info just long enough to mark loop-exit edges, then
   discard it together with the dominator info it computed.  */
4089 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
4090 mark_loop_exit_edges ();
4091 loop_optimizer_finalize ();
4092 free_dominance_info (CDI_DOMINATORS
);
4094 /* Compute postdominators. */
4095 calculate_dominance_info (CDI_POST_DOMINATORS
);
4097 df_set_flags (DF_LR_RUN_DCE
);
4099 /* Go through each of the basic blocks looking for things to convert. If we
4100 have conditional execution, we make multiple passes to allow us to handle
4101 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
4106 /* Only need to do dce on the first pass. */
4107 df_clear_flags (DF_LR_RUN_DCE
);
/* Cleared at the top of each pass; a successful conditional-execution
   conversion sets it, which makes the do/while below run another pass.  */
4108 cond_exec_changed_p
= FALSE
;
4111 #ifdef IFCVT_MULTIPLE_DUMPS
4112 if (dump_file
&& pass
> 1)
4113 fprintf (dump_file
, "\n\n========== Pass %d ==========\n", pass
);
/* Repeatedly try to convert the header of this block until the block
   is dirtied or no further if-header is found.  */
4119 while (!df_get_bb_dirty (bb
)
4120 && (new_bb
= find_if_header (bb
, pass
)) != NULL
)
4124 #ifdef IFCVT_MULTIPLE_DUMPS
4125 if (dump_file
&& cond_exec_changed_p
)
4126 print_rtl_with_bb (dump_file
, get_insns ());
4129 while (cond_exec_changed_p
);
/* Converged: the last pass made no conditional-execution changes.  */
4131 #ifdef IFCVT_MULTIPLE_DUMPS
4133 fprintf (dump_file
, "\n\n========== no more changes\n");
4136 free_dominance_info (CDI_POST_DOMINATORS
);
4141 clear_aux_for_blocks ();
4143 /* If we allocated new pseudos, we must resize the array for sched1. */
4144 if (max_regno
< max_reg_num ())
4145 max_regno
= max_reg_num ();
4147 /* Write the final stats. */
4148 if (dump_file
&& num_possible_if_blocks
> 0)
4151 "\n%d possible IF blocks searched.\n",
4152 num_possible_if_blocks
);
4154 "%d IF blocks converted.\n",
4155 num_updated_if_blocks
);
4157 "%d true changes made.\n\n\n",
/* Drop the DF_LIVE problem registered at function entry.  */
4162 df_remove_problem (df_live
);
4164 #ifdef ENABLE_CHECKING
4165 verify_flow_info ();
/* Pass gate for the first RTL if-conversion pass: run only when
   optimizing and the "if_conversion" debug counter permits (dbg_cnt
   lets a miscompile be bisected to a specific transformation).  */
4170 gate_handle_if_conversion (void)
4172 return (optimize
> 0)
4173 && dbg_cnt (if_conversion
);
4176 /* If-conversion and CFG cleanup. */
/* Execute function for the first if-conversion pass: when
   flag_if_conversion is set, optionally dump flow info and run the
   converter, then perform an expensive CFG cleanup.
   NOTE(review): the call into the converter itself and the return
   statement are missing from this extraction.  */
4178 rest_of_handle_if_conversion (void)
4180 if (flag_if_conversion
)
4183 dump_flow_info (dump_file
, dump_flags
);
4184 cleanup_cfg (CLEANUP_EXPENSIVE
);
/* Pass descriptor for the first if-conversion pass, timed under
   TV_IFCVT.  On finish: flush dataflow, verify RTL sharing, and dump.
   NOTE(review): the pass-type tag, name string and sub/next fields are
   missing from this extraction.  */
4192 struct rtl_opt_pass pass_rtl_ifcvt
=
4197 gate_handle_if_conversion
, /* gate */
4198 rest_of_handle_if_conversion
, /* execute */
4201 0, /* static_pass_number */
4202 TV_IFCVT
, /* tv_id */
4203 0, /* properties_required */
4204 0, /* properties_provided */
4205 0, /* properties_destroyed */
4206 0, /* todo_flags_start */
4207 TODO_df_finish
| TODO_verify_rtl_sharing
|
4208 TODO_dump_func
/* todo_flags_finish */
/* Pass gate for the post-combine rerun of if-conversion: requires
   optimization, flag_if_conversion, and the "if_after_combine" debug
   counter to permit it.  */
4213 gate_handle_if_after_combine (void)
4215 return optimize
> 0 && flag_if_conversion
4216 && dbg_cnt (if_after_combine
);
4220 /* Rerun if-conversion, as combine may have simplified things enough
4221 to now meet sequence length restrictions. */
/* NOTE(review): only the header line of this execute function is
   visible; its body is missing from the extraction.  */
4223 rest_of_handle_if_after_combine (void)
/* Pass descriptor for the post-combine if-conversion rerun, also timed
   under TV_IFCVT; its finish flags additionally request a GC collect.
   NOTE(review): the pass-type tag, name string and sub/next fields are
   missing from this extraction.  */
4229 struct rtl_opt_pass pass_if_after_combine
=
4234 gate_handle_if_after_combine
, /* gate */
4235 rest_of_handle_if_after_combine
, /* execute */
4238 0, /* static_pass_number */
4239 TV_IFCVT
, /* tv_id */
4240 0, /* properties_required */
4241 0, /* properties_provided */
4242 0, /* properties_destroyed */
4243 0, /* todo_flags_start */
4244 TODO_df_finish
| TODO_verify_rtl_sharing
|
4246 TODO_ggc_collect
/* todo_flags_finish */
/* Pass gate for the post-reload if-conversion pass: requires
   optimization, flag_if_conversion2 (note: a distinct flag from the
   earlier passes), and the "if_after_reload" debug counter.  */
4252 gate_handle_if_after_reload (void)
4254 return optimize
> 0 && flag_if_conversion2
4255 && dbg_cnt (if_after_reload
);
/* Execute function for the post-reload if-conversion pass.
   NOTE(review): only the header line is visible; the body is missing
   from this extraction.  */
4259 rest_of_handle_if_after_reload (void)
4266 struct rtl_opt_pass pass_if_after_reload
=
4271 gate_handle_if_after_reload
, /* gate */
4272 rest_of_handle_if_after_reload
, /* execute */
4275 0, /* static_pass_number */
4276 TV_IFCVT2
, /* tv_id */
4277 0, /* properties_required */
4278 0, /* properties_provided */
4279 0, /* properties_destroyed */
4280 0, /* todo_flags_start */
4281 TODO_df_finish
| TODO_verify_rtl_sharing
|
4283 TODO_ggc_collect
/* todo_flags_finish */