/* If-conversion support.
   Copyright (C) 2000 Free Software Foundation, Inc.

   This file is part of GNU CC.

   GNU CC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GNU CC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GNU CC; see the file COPYING.  If not, write to
   the Free Software Foundation, 59 Temple Place - Suite 330,
   Boston, MA 02111-1307, USA.  */
#include "insn-config.h"
#include "hard-reg-set.h"
#include "basic-block.h"

#ifndef HAVE_conditional_execution
#define HAVE_conditional_execution 0
#endif
#ifndef HAVE_conditional_move
#define HAVE_conditional_move 0
#endif

#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE   (BRANCH_COST + 1)
#endif

#define NULL_EDGE   ((struct edge_def *) NULL)
#define NULL_BLOCK  ((struct basic_block_def *) NULL)
/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of basic blocks that were removed.  */
static int num_removed_blocks;

/* The post-dominator relation on the original block numbers.  */
static sbitmap *post_dominators;
/* Forward references.  */
static int count_bb_insns		PARAMS ((basic_block));
static rtx first_active_insn		PARAMS ((basic_block));
static int last_active_insn_p		PARAMS ((basic_block, rtx));
static int seq_contains_jump		PARAMS ((rtx));

static int cond_exec_process_insns	PARAMS ((rtx, rtx, rtx, rtx, int));
static rtx cond_exec_get_condition	PARAMS ((rtx));
static int cond_exec_process_if_block	PARAMS ((basic_block, basic_block,
						 basic_block, basic_block));

static rtx noce_get_condition		PARAMS ((rtx, rtx *));
static int noce_operand_ok		PARAMS ((rtx));
static int noce_process_if_block	PARAMS ((basic_block, basic_block,
						 basic_block, basic_block));

static int process_if_block		PARAMS ((basic_block, basic_block,
						 basic_block, basic_block));
static void merge_if_block		PARAMS ((basic_block, basic_block,
						 basic_block, basic_block));

static int find_if_header		PARAMS ((basic_block));
static int find_if_block		PARAMS ((basic_block, edge, edge));
static int find_if_case_1		PARAMS ((basic_block, edge, edge));
static int find_if_case_2		PARAMS ((basic_block, edge, edge));
static int find_memory			PARAMS ((rtx *, void *));
static int dead_or_predicable		PARAMS ((basic_block, basic_block,
						 basic_block, rtx, int));
static void noce_emit_move_insn		PARAMS ((rtx, rtx));
/* Abuse the basic_block AUX field to store the original block index,
   as well as a flag indicating that the block should be rescanned for
   life analysis.  */

#define SET_ORIG_INDEX(BB,I)	((BB)->aux = (void *)((size_t)(I) << 1))
#define ORIG_INDEX(BB)		((size_t)(BB)->aux >> 1)
#define SET_UPDATE_LIFE(BB)	((BB)->aux = (void *)((size_t)(BB)->aux | 1))
#define UPDATE_LIFE(BB)		((size_t)(BB)->aux & 1)
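
/* The encoding is simple: the low bit of AUX is the update-life flag set
   by SET_UPDATE_LIFE, and the remaining bits hold the original block
   index, which is why SET_ORIG_INDEX shifts the index left by one.  */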
/* Count the number of non-jump active insns in BB.  */

      if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)

      insn = NEXT_INSN (insn);
/* Return the first non-jump active insn in the basic block.  */

first_active_insn (bb)

  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);

  while (GET_CODE (insn) == NOTE)
    insn = NEXT_INSN (insn);

  if (GET_CODE (insn) == JUMP_INSN)
/* Return true if INSN is the last active non-jump insn in BB.  */

last_active_insn_p (bb, insn)

    insn = NEXT_INSN (insn);
  while (GET_CODE (insn) == NOTE);

  return GET_CODE (insn) == JUMP_INSN;
/* It is possible, especially when dealing with multi-word
   arithmetic, for the expanders to have emitted jumps.  Search
   through the sequence and return TRUE if a jump exists so that
   we can abort the conversion.  */

seq_contains_jump (insn)

      if (GET_CODE (insn) == JUMP_INSN)

      insn = NEXT_INSN (insn);
/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.  */

cond_exec_process_insns (start, end, test, prob_val, mod_ok)
     rtx start;		/* first insn to look at */
     rtx end;		/* last insn to look at */
     rtx test;		/* conditional execution test */
     rtx prob_val;	/* probability of branch taken.  */
     int mod_ok;	/* true if modifications are ok on the last insn.  */

  int must_be_last = FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))

      if (GET_CODE (insn) == NOTE)

      if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)

	  /* ??? Ug.  Actually unlinking the thing is problematic,
	     given what we'd have to coordinate with our callers.  */
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	  NOTE_SOURCE_FILE (insn) = 0;

      /* Last insn wasn't last?  */

      if (modified_in_p (test, insn))

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);

      /* If the machine needs to modify the insn being conditionally executed,
	 say for example to force a constant integer operand into a temp
	 register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (pattern, insn);

      validate_change (insn, &PATTERN (insn),
		       gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),

      if (GET_CODE (insn) == CALL_INSN && prob_val)
	validate_change (insn, &REG_NOTES (insn),
			 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
					  REG_NOTES (insn)), 1);
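
      /* The first validate_change above is what performs the conversion:
	 each convertible insn, e.g. (set (reg) (...)), is rewrapped as
	 (cond_exec (test) (set (reg) (...))) -- a sketch of the general
	 COND_EXEC shape; the exact test rtx is target-dependent.  */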
/* Return the condition for a jump.  Do not do any special processing.  */

cond_exec_get_condition (jump)

  if (any_condjump_p (jump))
    test_if = SET_SRC (pc_set (jump));

  cond = XEXP (test_if, 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
      && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
    cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
			   GET_MODE (cond), XEXP (cond, 0),
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  */

cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
     basic_block test_bb;	/* Basic block test is in */
     basic_block then_bb;	/* Basic block for THEN block */
     basic_block else_bb;	/* Basic block for ELSE block */
     basic_block join_bb;	/* Basic block the join label is in */

  rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
  rtx then_start;		/* first insn in THEN block */
  rtx then_end;			/* last insn + 1 in THEN block */
  rtx else_start = NULL_RTX;	/* first insn in ELSE block or NULL */
  rtx else_end = NULL_RTX;	/* last insn + 1 in ELSE block */
  int max;			/* max # of insns to convert.  */
  int then_mod_ok;		/* whether conditional mods are ok in THEN */
  rtx true_expr;		/* test for else block insns */
  rtx false_expr;		/* test for then block insns */
  rtx true_prob_val;		/* probability of else block */
  rtx false_prob_val;		/* probability of then block */

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (test_bb->end);

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (! onlyjump_p (test_bb->end))

  /* Collect the bounds of where we're to search.  */

  then_start = then_bb->head;
  then_end = then_bb->end;

  /* Skip a label heading the THEN block.  */
  if (GET_CODE (then_start) == CODE_LABEL)
    then_start = NEXT_INSN (then_start);

  /* Skip a (use (const_int 0)) or branch as the final insn.  */
  if (GET_CODE (then_end) == INSN
      && GET_CODE (PATTERN (then_end)) == USE
      && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
    then_end = PREV_INSN (then_end);
  else if (GET_CODE (then_end) == JUMP_INSN)
    then_end = PREV_INSN (then_end);

      /* Skip the ELSE block's label.  */
      else_start = NEXT_INSN (else_bb->head);
      else_end = else_bb->end;

      /* Skip a (use (const_int 0)) or branch as the final insn.  */
      if (GET_CODE (else_end) == INSN
	  && GET_CODE (PATTERN (else_end)) == USE
	  && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
	else_end = PREV_INSN (else_end);
      else if (GET_CODE (else_end) == JUMP_INSN)
	else_end = PREV_INSN (else_end);

  /* How many instructions should we convert in total?  */

      max = 2 * MAX_CONDITIONAL_EXECUTE;
      n_insns = count_bb_insns (else_bb);

      max = MAX_CONDITIONAL_EXECUTE;
  n_insns += count_bb_insns (then_bb);

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;
  false_expr = gen_rtx_fmt_ee (reverse_condition (GET_CODE (true_expr)),
			       GET_MODE (true_expr), XEXP (true_expr, 0),
			       XEXP (true_expr, 1));

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)

  true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);

      true_prob_val = XEXP (true_prob_val, 0);
      false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));

    false_prob_val = NULL_RTX;

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

      && ! cond_exec_process_insns (then_start, then_end,
				    false_expr, false_prob_val, then_mod_ok))

      && ! cond_exec_process_insns (else_start, else_end,
				    true_expr, true_prob_val, TRUE))

  if (! apply_change_group ())

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
#endif

  /* Conversion succeeded.  */
    fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
	     n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  */
  merge_if_block (test_bb, then_bb, else_bb, join_bb);

#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
#endif
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  basic_block test_bb;
  rtx insn_a, insn_b;
  rtx x, a, b;
  rtx jump, cond, cond_earliest;
};

static rtx noce_emit_store_flag		PARAMS ((struct noce_if_info *,
						 rtx, int, int));
static int noce_try_store_flag		PARAMS ((struct noce_if_info *));
static int noce_try_store_flag_inc	PARAMS ((struct noce_if_info *));
static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
static int noce_try_store_flag_mask	PARAMS ((struct noce_if_info *));
static rtx noce_emit_cmove		PARAMS ((struct noce_if_info *,
						 rtx, enum rtx_code, rtx,
						 rtx, rtx, rtx));
static int noce_try_cmove		PARAMS ((struct noce_if_info *));
static int noce_try_cmove_arith	PARAMS ((struct noce_if_info *));
static rtx noce_get_alt_condition	PARAMS ((struct noce_if_info *,
						 rtx, rtx *));
static int noce_try_minmax		PARAMS ((struct noce_if_info *));
static int noce_try_abs		PARAMS ((struct noce_if_info *));
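
/* The noce_ prefix on the routines below apparently stands for "no
   conditional execution": these transforms rewrite the if-block as
   straight-line rtl (store-flag, arithmetic, or cmove) rather than as
   predicated COND_EXEC insns.  */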
/* Helper function for noce_try_store_flag*.  */

noce_emit_store_flag (if_info, x, reversep, normalize)
     struct noce_if_info *if_info;
     int reversep, normalize;

  rtx cond = if_info->cond;

  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
		  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

    cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);

    code = reversed_comparison_code (cond, if_info->jump);

    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))

      tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),

      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)

	  if_info->cond_earliest = if_info->jump;

  /* Don't even try if the comparison operands are weird.  */

  return emit_store_flag (x, code, XEXP (cond, 0),
			  XEXP (cond, 1), VOIDmode,
			  (code == LTU || code == LEU
			   || code == GEU || code == GTU), normalize);
/* Emit an instruction to move an rtx into a STRICT_LOW_PART.  */

noce_emit_move_insn (x, y)

  enum machine_mode outmode, inmode;

  if (GET_CODE (x) != STRICT_LOW_PART)

      emit_move_insn (x, y);

  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  inmode = GET_MODE (inner);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode),
		   bitpos, outmode, y, GET_MODE_BITSIZE (inmode),
		   GET_MODE_BITSIZE (inmode));
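
  /* A sketch of the case handled just above: a destination such as
     (strict_low_part (subreg:HI (reg:SI 100) 0)) -- illustrative modes,
     pseudo number, and byte offset -- writes only the narrow SUBREG
     portion of the register, so the value is inserted with
     store_bit_field rather than a plain move.  */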
/* Convert "if (test) x = 1; else x = 0".

   Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
   tried in noce_try_store_flag_constants after noce_try_cmove has had
   a go at the conversion.  */

noce_try_store_flag (if_info)
     struct noce_if_info *if_info;

  if (GET_CODE (if_info->b) == CONST_INT
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
      && if_info->a == const0_rtx)

  else if (if_info->b == const0_rtx
	   && GET_CODE (if_info->a) == CONST_INT
	   && INTVAL (if_info->a) == STORE_FLAG_VALUE
	   && (reversed_comparison_code (if_info->cond, if_info->jump)

  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  emit_insns_before (seq, if_info->cond_earliest);
/* Convert "if (test) x = a; else x = b", for A and B constant.  */

noce_try_store_flag_constants (if_info)
     struct noce_if_info *if_info;

  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;

      && GET_CODE (if_info->a) == CONST_INT
      && GET_CODE (if_info->b) == CONST_INT)

      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);
      diff = itrue - ifalse;

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)

      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)

      else if (ifalse == 0 && exact_log2 (itrue) >= 0
	       && (STORE_FLAG_VALUE == 1
		   || BRANCH_COST >= 2))

      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
	       && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
	normalize = 1, reversep = 1;

	       && (STORE_FLAG_VALUE == -1
		   || BRANCH_COST >= 2))

      else if (ifalse == -1 && can_reverse
	       && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
	normalize = -1, reversep = 1;
      else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)

	  tmp = itrue; itrue = ifalse; ifalse = tmp;

      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);

      /* if (test) x = 3; else x = 4;
	 =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)

	  target = expand_binop (GET_MODE (if_info->x),
				 (diff == STORE_FLAG_VALUE
				  ? add_optab : sub_optab),
				 GEN_INT (ifalse), target, if_info->x, 0,

      /* if (test) x = 8; else x = 0;
	 =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)

	  target = expand_binop (GET_MODE (if_info->x), ashl_optab,
				 target, GEN_INT (tmp), if_info->x, 0,

      /* if (test) x = -1; else x = b;
	 =>   x = -(test != 0) | b;  */
      else if (itrue == -1)

	  target = expand_binop (GET_MODE (if_info->x), ior_optab,
				 target, GEN_INT (ifalse), if_info->x, 0,

      /* if (test) x = a; else x = b;
	 =>   x = (-(test != 0) & (b - a)) + a;  */

	  target = expand_binop (GET_MODE (if_info->x), and_optab,
				 target, GEN_INT (diff), if_info->x, 0,

	  target = expand_binop (GET_MODE (if_info->x), add_optab,
				 target, GEN_INT (ifalse), if_info->x, 0,

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      if (seq_contains_jump (seq))

      emit_insns_before (seq, if_info->cond_earliest);
/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".  */

noce_try_store_flag_inc (if_info)
     struct noce_if_info *if_info;

  int subtract, normalize;

      /* Should be no `else' case to worry about.  */
      && if_info->b == if_info->x
      && GET_CODE (if_info->a) == PLUS
      && (XEXP (if_info->a, 1) == const1_rtx
	  || XEXP (if_info->a, 1) == constm1_rtx)
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
      && (reversed_comparison_code (if_info->cond, if_info->jump)

      if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	subtract = 0, normalize = 0;
      else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	subtract = 1, normalize = 0;

	subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));

      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),

	target = expand_binop (GET_MODE (if_info->x),
			       subtract ? sub_optab : add_optab,
			       if_info->x, target, if_info->x, 0, OPTAB_WIDEN);

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      if (seq_contains_jump (seq))

      emit_insns_before (seq, if_info->cond_earliest);
/* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */

noce_try_store_flag_mask (if_info)
     struct noce_if_info *if_info;

       || STORE_FLAG_VALUE == -1)
      && ((if_info->a == const0_rtx
	   && rtx_equal_p (if_info->b, if_info->x))
	  || ((reversep = (reversed_comparison_code (if_info->cond,

	      && if_info->b == const0_rtx
	      && rtx_equal_p (if_info->a, if_info->x))))

      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),

	target = expand_binop (GET_MODE (if_info->x), and_optab,
			       if_info->x, target, if_info->x, 0,

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      if (seq_contains_jump (seq))

      emit_insns_before (seq, if_info->cond_earliest);
/* Helper function for noce_try_cmove and noce_try_cmove_arith.  */

noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
     struct noce_if_info *if_info;
     rtx x, cmp_a, cmp_b, vfalse, vtrue;

  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)

      tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)

  /* Don't even try if the comparison operands are weird.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))

#if HAVE_conditional_move
  return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
				vtrue, vfalse, GET_MODE (x),
				(code == LTU || code == GEU
				 || code == LEU || code == GTU));

  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
/* Try only simple constants and registers here.  More complex cases
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
   has had a go at it.  */

noce_try_cmove (if_info)
     struct noce_if_info *if_info;

  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))

      code = GET_CODE (if_info->cond);
      target = noce_emit_cmove (if_info, if_info->x, code,
				XEXP (if_info->cond, 0),
				XEXP (if_info->cond, 1),
				if_info->a, if_info->b);

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      emit_insns_before (seq, if_info->cond_earliest);
/* Try more complex cases involving conditional_move.  */

noce_try_cmove_arith (if_info)
     struct noce_if_info *if_info;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  if (! no_new_pseudos && cse_not_expected
      && GET_CODE (a) == MEM && GET_CODE (b) == MEM

      x = gen_reg_rtx (Pmode);

  /* ??? We could handle this if we knew that a load from A or B could
     not fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_p (a) || may_trap_p (b))

  /* if (test) x = a + b; else x = c - d;  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)

      if (rtx_equal_p (b, x))

      else if (general_operand (b, GET_MODE (b)))

	  code = reversed_comparison_code (if_info->cond, if_info->jump);
	  tmp = a, a = b, b = tmp;
	  tmp = insn_a, insn_a = insn_b, insn_b = tmp;

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))

	    goto end_seq_and_fail;

	  tmp = gen_reg_rtx (GET_MODE (a));
	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));

	goto end_seq_and_fail;

	  a = gen_reg_rtx (GET_MODE (a));
	  tmp = copy_rtx (insn_a);
	  set = single_set (tmp);

	  tmp = emit_insn (PATTERN (tmp));

      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;

  if (! general_operand (b, GET_MODE (b)))

	    goto end_seq_and_fail;

	  tmp = gen_reg_rtx (GET_MODE (b));
	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));

	goto end_seq_and_fail;

	  b = gen_reg_rtx (GET_MODE (b));
	  tmp = copy_rtx (insn_b);
	  set = single_set (tmp);

	  tmp = emit_insn (PATTERN (tmp));

      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
			    XEXP (if_info->cond, 1), a, b);

    goto end_seq_and_fail;

  /* If we're handling a memory from above, emit the load now.  */

      tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
	MEM_VOLATILE_P (tmp) = 1;
      if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
	MEM_IN_STRUCT_P (tmp) = 1;
      if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
	MEM_SCALAR_P (tmp) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
	MEM_ALIAS_SET (tmp) = MEM_ALIAS_SET (if_info->a);

      noce_emit_move_insn (if_info->x, tmp);

  else if (target != x)
    noce_emit_move_insn (x, target);

  emit_insns_before (tmp, if_info->cond_earliest);
/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.  */

noce_get_alt_condition (if_info, target, earliest)
     struct noce_if_info *if_info;

  rtx cond, set, insn;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))

      *earliest = if_info->cond_earliest;
      return if_info->cond;

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);

  cond = canonicalize_condition (if_info->jump, cond, reverse,

  if (! cond || ! reg_mentioned_p (target, cond))

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))

	&& (modified_in_p (if_info->a, insn)
	    || modified_in_p (if_info->b, insn)))
/* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.  */

noce_try_minmax (if_info)
     struct noce_if_info *if_info;

  rtx cond, earliest, target, seq;

  /* ??? Can't guarantee that expand_binop won't create pseudos.  */

  /* ??? Reject FP modes since we don't know how 0 vs -0 or NaNs
     will be resolved with an SMIN/SMAX.  It wouldn't be too hard
     to get the target to tell us...  */
  if (FLOAT_MODE_P (GET_MODE (if_info->x))
      && TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      && ! flag_unsafe_math_optimizations)

  cond = noce_get_alt_condition (if_info, if_info->a, &earliest);

  /* Verify the condition is of the form we expect, and canonicalize
     the comparison code.  */
  code = GET_CODE (cond);
  if (rtx_equal_p (XEXP (cond, 0), if_info->a))

      if (! rtx_equal_p (XEXP (cond, 1), if_info->b))

  else if (rtx_equal_p (XEXP (cond, 1), if_info->a))

      if (! rtx_equal_p (XEXP (cond, 0), if_info->b))

      code = swap_condition (code);

  /* Determine what sort of operation this is.  Note that the code is for
     a taken branch, so the code->operation mapping appears backwards.  */

  target = expand_binop (GET_MODE (if_info->x), op, if_info->a, if_info->b,
			 if_info->x, unsignedp, OPTAB_WIDEN);

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  if (seq_contains_jump (seq))

  emit_insns_before (seq, earliest);
  if_info->cond = cond;
  if_info->cond_earliest = earliest;
/* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc.  */

noce_try_abs (if_info)
     struct noce_if_info *if_info;

  rtx cond, earliest, target, seq, a, b, c;

  /* ??? Can't guarantee that expand_binop won't create pseudos.  */

  /* Recognize A and B as constituting an ABS or NABS.  */

  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))

  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))

      c = a; a = b; b = c;

  cond = noce_get_alt_condition (if_info, b, &earliest);

  /* Verify the condition is of the form we expect.  */
  if (rtx_equal_p (XEXP (cond, 0), b))

  else if (rtx_equal_p (XEXP (cond, 1), b))

  /* Verify that C is zero.  Search backward through the block for
     a REG_EQUAL note if necessary.  */

      rtx insn, note = NULL;
      for (insn = earliest;
	   insn != if_info->test_bb->head;
	   insn = PREV_INSN (insn))

	    && ((note = find_reg_note (insn, REG_EQUAL, c))
		|| (note = find_reg_note (insn, REG_EQUIV, c))))

  if (GET_CODE (c) == MEM
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
    c = get_pool_constant (XEXP (c, 0));

  /* Work around funny ideas get_condition has wrt canonicalization.
     Note that these rtx constants are known to be CONST_INT, and
     therefore imply integer comparisons.  */
  if (c == constm1_rtx && GET_CODE (cond) == GT)

  else if (c == const1_rtx && GET_CODE (cond) == LT)

  else if (c != CONST0_RTX (GET_MODE (b)))

  /* Determine what sort of operation this is.  */
  switch (GET_CODE (cond))

  target = expand_unop (GET_MODE (if_info->x), abs_optab, b, if_info->x, 0);

  /* ??? It's a quandary whether cmove would be better here, especially
     for integers.  Perhaps combine will clean things up.  */
  if (target && negate)
    target = expand_unop (GET_MODE (target), neg_optab, target, if_info->x, 0);

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  if (seq_contains_jump (seq))

  emit_insns_before (seq, earliest);
  if_info->cond = cond;
  if_info->cond_earliest = earliest;
/* Look for the condition for the jump first.  We'd prefer to avoid
   get_condition if we can -- it tries to look back for the contents
   of an original compare.  On targets that use normal integers for
   comparisons, e.g. alpha, this is wasteful.  */

noce_get_condition (jump, earliest)

  /* If the condition variable is a register and is MODE_INT, accept it.
     Otherwise, fall back on get_condition.  */

  if (! any_condjump_p (jump))

  set = pc_set (jump);
  cond = XEXP (SET_SRC (set), 0);
  if (GET_CODE (XEXP (cond, 0)) == REG
      && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)

      /* If this branches to JUMP_LABEL when the condition is false,
	 reverse the condition.  */
      if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
	cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
			       GET_MODE (cond), XEXP (cond, 0),

    cond = get_condition (jump, earliest);
/* Return true if OP is ok for if-then-else processing.  */

noce_operand_ok (op)

  /* We special-case memories, so handle any of them with
     no address side effects.  */
  if (GET_CODE (op) == MEM)
    return ! side_effects_p (XEXP (op, 0));

  if (side_effects_p (op))

  /* ??? Unfortunately may_trap_p can't look at flag_trapping_math, due to
     being linked into the genfoo programs.  This is probably a mistake.
     With finite operands, most fp operations don't trap.  */
  if (! flag_trapping_math && FLOAT_MODE_P (GET_MODE (op)))
    switch (GET_CODE (op))

	/* ??? This is kinda lame -- almost every target will have forced
	   the constant into a register first.  But given the expense of
	   division, this is probably for the best.  */
	return (CONSTANT_P (XEXP (op, 1))
		&& XEXP (op, 1) != CONST0_RTX (GET_MODE (op))
		&& ! may_trap_p (XEXP (op, 0)));

  switch (GET_RTX_CLASS (GET_CODE (op)))

      return ! may_trap_p (XEXP (op, 0));

      return ! may_trap_p (XEXP (op, 0)) && ! may_trap_p (XEXP (op, 1));

  return ! may_trap_p (op);
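
/* Both source operands A and B are screened with noce_operand_ok in
   noce_process_if_block below before any of the noce_try_* transforms
   is attempted.  */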
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   without using conditional execution.  Return TRUE if we were
   successful at converting the block.  */

noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
     basic_block test_bb;	/* Basic block test is in */
     basic_block then_bb;	/* Basic block for THEN block */
     basic_block else_bb;	/* Basic block for ELSE block */
     basic_block join_bb;	/* Basic block the join label is in */

  /* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

     The later patterns require jumps to be more expensive.

     ??? For future expansion, look for multiple X in such patterns.  */

  struct noce_if_info if_info;

  rtx orig_x, x, a, b;
  rtx jump, cond, insn;

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = test_bb->end;
  cond = noce_get_condition (jump, &if_info.cond_earliest);

  /* If the conditional jump is more than just a conditional jump,
     then we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)

  /* Look for one of the potential sets.  */
  insn_a = first_active_insn (then_bb);

      || ! last_active_insn_p (then_bb, insn_a)
      || (set_a = single_set (insn_a)) == NULL_RTX)

  x = SET_DEST (set_a);
  a = SET_SRC (set_a);

  /* Look for the other potential set.  Make sure we've got equivalent
     destinations.  */
  /* ??? This is overconservative.  Storing to two different mems is
     as easy as conditionally computing the address.  Storing to a
     single mem merely requires a scratch memory to use as one of the
     destination addresses; often the memory immediately below the
     stack pointer is available for this.  */

      insn_b = first_active_insn (else_bb);

	  || ! last_active_insn_p (else_bb, insn_b)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b)))

      insn_b = prev_nonnote_insn (if_info.cond_earliest);

	  || GET_CODE (insn_b) != INSN
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b))
	  || reg_mentioned_p (x, cond)
	  || reg_mentioned_p (x, a)
	  || reg_mentioned_p (x, SET_SRC (set_b)))
	insn_b = set_b = NULL_RTX;

  b = (set_b ? SET_SRC (set_b) : x);

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_mentioned_p (x, insn))

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))

	&& (modified_in_p (a, insn) || modified_in_p (b, insn)))

  /* Only operate on register destinations, and even then avoid extending
     the lifetime of hard registers on small register class machines.  */

  if (GET_CODE (x) != REG
      || (SMALL_REGISTER_CLASSES
	  && REGNO (x) < FIRST_PSEUDO_REGISTER))

      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
				 ? XEXP (x, 0) : x));

  /* Don't operate on sources that may trap or are volatile.  */
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))

  /* Set up the info block for our subroutines.  */
  if_info.test_bb = test_bb;
  if_info.cond = cond;
  if_info.jump = jump;
  if_info.insn_a = insn_a;
  if_info.insn_b = insn_b;

  /* Try optimizations in some approximation of a useful order.  */
  /* ??? Should first look to see if X is live incoming at all.  If it
     isn't, we don't need anything but an unconditional set.  */

  /* Look and see if A and B are really the same.  Avoid creating silly
     cmove constructs that no one will fix up later.  */
  if (rtx_equal_p (a, b))

      /* If we have an INSN_B, we don't have to create any new rtl.  Just
	 move the instruction that we already have.  If we don't have an
	 INSN_B, that means that A == X, and we've got a noop move.  In
	 that case don't do anything and let the code below delete INSN_A.  */
      if (insn_b && else_bb)

	  if (else_bb && insn_b == else_bb->end)
	    else_bb->end = PREV_INSN (insn_b);
	  reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));

	  /* If there was a REG_EQUAL note, delete it since it may have been
	     true due to this insn being after a jump.  */
	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
	    remove_note (insn_b, note);

  /* If we have "x = b; if (...) x = a;", and x has side-effects, then
     x must be executed twice.  */
  else if (insn_b && side_effects_p (orig_x))

  if (noce_try_store_flag (&if_info))

  if (noce_try_minmax (&if_info))

  if (noce_try_abs (&if_info))

  if (HAVE_conditional_move
      && noce_try_cmove (&if_info))

  if (! HAVE_conditional_execution)

      if (noce_try_store_flag_constants (&if_info))

      if (noce_try_store_flag_inc (&if_info))

      if (noce_try_store_flag_mask (&if_info))

      if (HAVE_conditional_move
	  && noce_try_cmove_arith (&if_info))

  /* The original sets may now be killed.  */
  if (insn_a == then_bb->end)
    then_bb->end = PREV_INSN (insn_a);
  flow_delete_insn (insn_a);

  /* Several special cases here: First, we may have reused insn_b above,
     in which case insn_b is now NULL.  Second, we want to delete insn_b
     if it came from the ELSE block, because it follows the now correct
     write that appears in the TEST block.  However, if we got insn_b from
     the TEST block, it may in fact be loading data needed for the comparison.
     We'll let life_analysis remove the insn if it's really dead.  */
  if (insn_b && else_bb)

      if (insn_b == else_bb->end)
	else_bb->end = PREV_INSN (insn_b);
      flow_delete_insn (insn_b);

  /* The new insns will have been inserted before cond_earliest.  We should
     be able to remove the jump with impunity, but the condition itself may
     have been modified by gcse to be shared across basic blocks.  */
  test_bb->end = PREV_INSN (jump);
  flow_delete_insn (jump);

  /* If we used a temporary, fix it up now.  */

      noce_emit_move_insn (orig_x, x);
      insn_b = gen_sequence ();

      test_bb->end = emit_insn_after (insn_b, test_bb->end);

  /* Merge the blocks!  */
  merge_if_block (test_bb, then_bb, else_bb, join_bb);
/* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
   straight line code.  Return true if successful.  */

process_if_block (test_bb, then_bb, else_bb, join_bb)
     basic_block test_bb;	/* Basic block test is in */
     basic_block then_bb;	/* Basic block for THEN block */
     basic_block else_bb;	/* Basic block for ELSE block */
     basic_block join_bb;	/* Basic block the join label is in */

  if (! reload_completed
      && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))

  if (HAVE_conditional_execution
      && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
/* Merge the blocks and mark for local life update.  */

merge_if_block (test_bb, then_bb, else_bb, join_bb)
     basic_block test_bb;	/* Basic block test is in */
     basic_block then_bb;	/* Basic block for THEN block */
     basic_block else_bb;	/* Basic block for ELSE block */
     basic_block join_bb;	/* Basic block the join label is in */

  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  /* First merge TEST block into THEN block.  This is a no-brainer since
     the THEN block did not have a code label to begin with.  */

  if (combo_bb->global_live_at_end)
    COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
  merge_blocks_nomove (combo_bb, then_bb);
  num_removed_blocks++;

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */

      merge_blocks_nomove (combo_bb, else_bb);
      num_removed_blocks++;

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (combo_bb->succ == NULL_EDGE)

      /* There should still be a branch at the end of the THEN or ELSE
	 blocks taking us to our final destination.  */
      if (! simplejump_p (combo_bb->end)
	  && ! returnjump_p (combo_bb->end))

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to abort).  */
  else if (join_bb->pred == NULL || join_bb->pred->pred_next == NULL)

      /* We can merge the JOIN.  */
      if (combo_bb->global_live_at_end)
	COPY_REG_SET (combo_bb->global_live_at_end,
		      join_bb->global_live_at_end);
      merge_blocks_nomove (combo_bb, join_bb);
      num_removed_blocks++;

      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (combo_bb->succ->succ_next != NULL_EDGE
	  || combo_bb->succ->dest != join_bb)

      /* Remove the jump and cruft from the end of the COMBO block.  */
      tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);

  /* Make sure we update life info properly.  */
  SET_UPDATE_LIFE (combo_bb);

  num_updated_if_blocks++;
/* Find a block ending in a simple IF condition.  Return TRUE if
   we were able to transform it in some way.  */

find_if_header (test_bb)
     basic_block test_bb;

  /* The kind of block we're looking for has exactly two successors.  */
  if ((then_edge = test_bb->succ) == NULL_EDGE
      || (else_edge = then_edge->succ_next) == NULL_EDGE
      || else_edge->succ_next != NULL_EDGE)

  /* Neither edge should be abnormal.  */
  if ((then_edge->flags & EDGE_COMPLEX)
      || (else_edge->flags & EDGE_COMPLEX))

  /* The THEN edge is canonically the one that falls through.  */
  if (then_edge->flags & EDGE_FALLTHRU)

  else if (else_edge->flags & EDGE_FALLTHRU)

      else_edge = then_edge;

  /* Otherwise this must be a multiway branch of some sort.  */

  if (find_if_block (test_bb, then_edge, else_edge))

      && (! HAVE_conditional_execution || reload_completed))

      if (find_if_case_1 (test_bb, then_edge, else_edge))

      if (find_if_case_2 (test_bb, then_edge, else_edge))

    fprintf (rtl_dump_file, "Conversion succeeded.\n");
/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
   block.  If so, we'll try to convert the insns to not require the branch.
   Return TRUE if we were successful at converting the block.  */

find_if_block (test_bb, then_edge, else_edge)
     basic_block test_bb;
     edge then_edge, else_edge;

  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block join_bb = NULL_BLOCK;
  edge then_succ = then_bb->succ;
  edge else_succ = else_bb->succ;

  /* The THEN block of an IF-THEN combo must have exactly one predecessor.  */
  if (then_bb->pred->pred_next != NULL_EDGE)

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (then_succ != NULL_EDGE
      && (then_succ->succ_next != NULL_EDGE
	  || (then_succ->flags & EDGE_COMPLEX)))

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  XXX we should fix this in the future.  */
  if (then_succ == NULL)

      if (else_bb->pred->pred_next == NULL_EDGE)

	  rtx last_insn = then_bb->end;

		 && GET_CODE (last_insn) == NOTE
		 && last_insn != then_bb->head)
	    last_insn = PREV_INSN (last_insn);

	      && GET_CODE (last_insn) == JUMP_INSN
	      && ! simplejump_p (last_insn))

	  else_bb = NULL_BLOCK;

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (then_succ->dest == else_bb)

      else_bb = NULL_BLOCK;

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (else_succ != NULL_EDGE
	   && then_succ->dest == else_succ->dest
	   && else_bb->pred->pred_next == NULL_EDGE
	   && else_succ->succ_next == NULL_EDGE
	   && ! (else_succ->flags & EDGE_COMPLEX))
    join_bb = else_succ->dest;

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */

  num_possible_if_blocks++;

	fprintf (rtl_dump_file,
		 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
		 test_bb->index, then_bb->index, else_bb->index,

	fprintf (rtl_dump_file,
		 "\nIF-THEN block found, start %d, then %d, join %d\n",
		 test_bb->index, then_bb->index, join_bb->index);

  /* Make sure IF, THEN, and ELSE blocks are adjacent.  Actually, we
     get the first condition for free, since we've already asserted that
     there's a fallthru edge from IF to THEN.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than aborting the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next_index = then_bb->index;
  if (else_bb && ++next_index != else_bb->index)

  if (++next_index != join_bb->index)

  /* Do the real work.  */
  return process_if_block (test_bb, then_bb, else_bb, join_bb);
/* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
   transformable, but not necessarily the other.  There need be no
   JOIN block.

   Return TRUE if we were successful at converting the block.

   Cases we'd like to look at:

   (1)
	if (test) goto over; // x not live

	if (! test) goto label;

   (2)
	if (test) goto E; // x not live

   (3) // This one's really only interesting for targets that can do
       // multiway branching, e.g. IA-64 BBB bundles.  For other targets
       // it results in multiple branches on a cache line, which often
       // does not sit well with predictors.

	if (test1) goto E; // predicted not taken

   Notes:

   (A) Don't do (2) if the branch is predicted against the block we're
   eliminating.  Do it anyway if we can eliminate a branch; this requires
   that the sole successor of the eliminated block postdominate the other
   side of the if.

   (B) With CE, on (3) we can steal from both sides of the if, creating

   Again, this is most useful if J postdominates.

   (C) CE substitutes for helpful life information.

   (D) These heuristics need a lot of work.  */
/* Tests for case 1 above.  */

find_if_case_1 (test_bb, then_edge, else_edge)
     basic_block test_bb;
     edge then_edge, else_edge;

  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge then_succ = then_bb->succ;

  /* THEN has one successor.  */
  if (!then_succ || then_succ->succ_next != NULL)

  /* THEN does not fall through, but is not strange either.  */
  if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))

  /* THEN has one predecessor.  */
  if (then_bb->pred->pred_next != NULL)

  /* ELSE follows THEN.  (??? could be moved)  */
  if (else_bb->index != then_bb->index + 1)

  num_possible_if_blocks++;

    fprintf (rtl_dump_file,
	     "\nIF-CASE-1 found, start %d, then %d\n",
	     test_bb->index, then_bb->index);

  /* THEN is small.  */
  if (count_bb_insns (then_bb) > BRANCH_COST)

  /* Find the label for THEN's destination.  */
  if (then_succ->dest == EXIT_BLOCK_PTR)

    new_lab = JUMP_LABEL (then_bb->end);

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb, new_lab, 1))

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  SET_UPDATE_LIFE (test_bb);
  bitmap_operation (test_bb->global_live_at_end,
		    else_bb->global_live_at_start,
		    then_bb->global_live_at_end, BITMAP_IOR);

  make_edge (NULL, test_bb, then_succ->dest, 0);
  flow_delete_block (then_bb);
  tidy_fallthru_edge (else_edge, test_bb, else_bb);

  num_removed_blocks++;
  num_updated_if_blocks++;
/* Test for case 2 above.  */

find_if_case_2 (test_bb, then_edge, else_edge)
     basic_block test_bb;
     edge then_edge, else_edge;

  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ = else_bb->succ;

  /* ELSE has one successor.  */
  if (!else_succ || else_succ->succ_next != NULL)

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)

  /* ELSE has one predecessor.  */
  if (else_bb->pred->pred_next != NULL)

  /* THEN is not EXIT.  */
  if (then_bb->index < 0)

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
  note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
  if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)

  else if (else_succ->dest->index < 0
	   || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
			ORIG_INDEX (else_succ->dest)))

  num_possible_if_blocks++;

    fprintf (rtl_dump_file,
	     "\nIF-CASE-2 found, start %d, else %d\n",
	     test_bb->index, else_bb->index);

  /* ELSE is small.  */
  if (count_bb_insns (else_bb) > BRANCH_COST)

  /* Find the label for ELSE's destination.  */
  if (else_succ->dest == EXIT_BLOCK_PTR)

      if (else_succ->flags & EDGE_FALLTHRU)

	  new_lab = else_succ->dest->head;
	  if (GET_CODE (new_lab) != CODE_LABEL)

    new_lab = JUMP_LABEL (else_bb->end);

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, new_lab, 0))

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  SET_UPDATE_LIFE (test_bb);
  bitmap_operation (test_bb->global_live_at_end,
		    then_bb->global_live_at_start,
		    else_bb->global_live_at_end, BITMAP_IOR);

  remove_edge (else_edge);
  make_edge (NULL, test_bb, else_succ->dest, 0);
  flow_delete_block (else_bb);

  num_removed_blocks++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */
/* A subroutine of dead_or_predicable called through for_each_rtx.
   Return 1 if a memory is found.  */

find_memory (px, data)
     void *data ATTRIBUTE_UNUSED;

  return GET_CODE (*px) == MEM;
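
/* It is used below as for_each_rtx (&PATTERN (insn), find_memory, NULL)
   to reject any candidate insn whose pattern references memory.  */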
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  NEW_DEST is the
   label TEST_BB should be branching to after the conversion.
   REVERSEP is true if the sense of the branch should be reversed.  */

dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
     basic_block test_bb, merge_bb, other_bb;

  rtx head, end, jump, earliest, old_dest;

  jump = test_bb->end;

  /* Find the extent of the real code in the merge block.  */
  head = merge_bb->head;
  end = merge_bb->end;

  if (GET_CODE (head) == CODE_LABEL)
    head = NEXT_INSN (head);
  if (GET_CODE (head) == NOTE)

	head = end = NULL_RTX;

      head = NEXT_INSN (head);

  if (GET_CODE (end) == JUMP_INSN)

	head = end = NULL_RTX;

      end = PREV_INSN (end);

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (HAVE_conditional_execution)

      /* In the conditional execution case, we have things easy.  We know
	 the condition is reversible.  We don't have to check life info,
	 because we're going to conditionally execute the code anyway.
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

      cond = cond_exec_get_condition (jump);

      prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);

	prob_val = XEXP (prob_val, 0);

	  cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
				 GET_MODE (cond), XEXP (cond, 0),

	    prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));

      if (! cond_exec_process_insns (head, end, cond, prob_val, 0))

  /* In the non-conditional execution case, we have to verify that there
     are no trapping operations, no calls, no references to memory, and
     that any registers modified are dead at the branch site.  */

      rtx insn, cond, prev;
      regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
      regset merge_set, tmp, test_live, test_set;
      struct propagate_block_info *pbi;

      /* Check for no calls or trapping operations.  */
      for (insn = head; ; insn = NEXT_INSN (insn))

	  if (GET_CODE (insn) == CALL_INSN)

	      if (may_trap_p (PATTERN (insn)))

	      /* ??? Even non-trapping memories such as stack frame
		 references must be avoided.  For stores, we collect
		 no lifetime info; for reads, we'd have to assert
		 true_dependence false against every store in the
		 TEST range.  */
	      if (for_each_rtx (&PATTERN (insn), find_memory, NULL))

      if (! any_condjump_p (jump))

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest);

      /* Collect:
	   MERGE_SET = set of registers set in MERGE_BB
	   TEST_LIVE = set of registers live at EARLIEST
	   TEST_SET  = set of registers set between EARLIEST and the
		       end of the block.  */

      tmp = INITIALIZE_REG_SET (tmp_head);
      merge_set = INITIALIZE_REG_SET (merge_set_head);
      test_live = INITIALIZE_REG_SET (test_live_head);
      test_set = INITIALIZE_REG_SET (test_set_head);

      /* ??? bb->local_set is only valid during calculate_global_regs_live,
	 so we must recompute usage for MERGE_BB.  Not so bad, I suppose,
	 since we've already asserted that MERGE_BB is small.  */
      propagate_block (merge_bb, tmp, merge_set, merge_set, 0);

      /* For small register class machines, don't lengthen lifetimes of
	 hard registers before reload.  */
      if (SMALL_REGISTER_CLASSES && ! reload_completed)

	  EXECUTE_IF_SET_IN_BITMAP

	       if (i < FIRST_PSEUDO_REGISTER

		   && ! global_regs[i])

      /* For TEST, we're interested in a range of insns, not a whole block.
	 Moreover, we're interested in the insns live from OTHER_BB.  */

      COPY_REG_SET (test_live, other_bb->global_live_at_start);
      pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,

      for (insn = jump; ; insn = prev)

	  prev = propagate_one_insn (pbi, insn);
	  if (insn == earliest)

      free_propagate_block_info (pbi);

      /* We can perform the transformation if
	   MERGE_SET & (TEST_SET | TEST_LIVE)
	 and
	   TEST_SET & merge_bb->global_live_at_start
	 are empty.  */

      bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
      bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
      EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, fail = 1);

      bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,

      EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, fail = 1);

      FREE_REG_SET (merge_set);
      FREE_REG_SET (test_live);
      FREE_REG_SET (test_set);

  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);

      ? ! invert_jump_1 (jump, new_dest)
      : ! redirect_jump_1 (jump, new_dest))

  if (! apply_change_group ())

    LABEL_NUSES (old_dest) -= 1;

    LABEL_NUSES (new_dest) += 1;
  JUMP_LABEL (jump) = new_dest;

      rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);

	XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));

  /* Move the insns out of MERGE_BB to before the branch.  */

      if (end == merge_bb->end)
	merge_bb->end = PREV_INSN (head);

      head = squeeze_notes (head, end);
      if (GET_CODE (end) == NOTE
	  && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
	      || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
	      || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))

	end = PREV_INSN (end);

      reorder_insns (head, end, PREV_INSN (earliest));
/* Main entry point for all if-conversion.  */

if_convert (life_data_ok)

  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_removed_blocks = 0;

  /* Free up basic_block_for_insn so that we don't have to keep it
     up to date, either here or in merge_blocks_nomove.  */
  free_basic_block_vars (1);

  /* Compute postdominators if we think we'll use them.  */
  post_dominators = NULL;
  if (HAVE_conditional_execution || life_data_ok)

      post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);

  /* Record initial block numbers.  */
  for (block_num = 0; block_num < n_basic_blocks; block_num++)
    SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);

  /* Go through each of the basic blocks looking for things to convert.  */
  for (block_num = 0; block_num < n_basic_blocks; )

      basic_block bb = BASIC_BLOCK (block_num);
      if (find_if_header (bb))
	block_num = bb->index;

  if (post_dominators)
    sbitmap_vector_free (post_dominators);

    fflush (rtl_dump_file);

  /* Rebuild basic_block_for_insn for update_life_info and for gcse.  */
  compute_bb_for_insn (get_max_uid ());

  /* Rebuild life info for basic blocks that require it.  */
  if (num_removed_blocks && life_data_ok)

      sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (update_life_blocks);

      /* If we allocated new pseudos, we must resize the array for sched1.  */
      if (max_regno < max_reg_num ())

	  max_regno = max_reg_num ();
	  allocate_reg_info (max_regno, FALSE, FALSE);

      for (block_num = 0; block_num < n_basic_blocks; block_num++)
	if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
	  SET_BIT (update_life_blocks, block_num);

      count_or_remove_death_notes (update_life_blocks, 1);
      /* ??? See about adding a mode that verifies that the initial
	 set of blocks don't let registers come live.  */
      update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
			PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
			| PROP_KILL_DEAD_CODE);

      sbitmap_free (update_life_blocks);

  /* Write the final stats.  */
  if (rtl_dump_file && num_possible_if_blocks > 0)

      fprintf (rtl_dump_file,
	       "\n%d possible IF blocks searched.\n",
	       num_possible_if_blocks);
      fprintf (rtl_dump_file,
	       "%d IF blocks converted.\n",
	       num_updated_if_blocks);
      fprintf (rtl_dump_file,
	       "%d basic blocks deleted.\n\n\n",
	       num_removed_blocks);

#ifdef ENABLE_CHECKING
  verify_flow_info ();