gcc/ifcvt.c
1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
24 #include "rtl.h"
25 #include "regs.h"
26 #include "function.h"
27 #include "flags.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "except.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "expr.h"
34 #include "real.h"
35 #include "output.h"
36 #include "toplev.h"
37 #include "tm_p.h"
40 #ifndef HAVE_conditional_execution
41 #define HAVE_conditional_execution 0
42 #endif
43 #ifndef HAVE_conditional_move
44 #define HAVE_conditional_move 0
45 #endif
46 #ifndef HAVE_incscc
47 #define HAVE_incscc 0
48 #endif
49 #ifndef HAVE_decscc
50 #define HAVE_decscc 0
51 #endif
52 #ifndef HAVE_trap
53 #define HAVE_trap 0
54 #endif
55 #ifndef HAVE_conditional_trap
56 #define HAVE_conditional_trap 0
57 #endif
59 #ifndef MAX_CONDITIONAL_EXECUTE
60 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
61 #endif
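/* For illustration: with a BRANCH_COST of 2 this default allows up to
   three insns in an IF-THEN block, or six in total across the THEN and
   ELSE blocks of an IF-THEN-ELSE, since the limit is doubled when an
   ELSE block is present (see cond_exec_process_if_block).  */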
63 #define NULL_EDGE ((struct edge_def *)NULL)
64 #define NULL_BLOCK ((struct basic_block_def *)NULL)
66 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
67 static int num_possible_if_blocks;
 69 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
70 execution. */
71 static int num_updated_if_blocks;
73 /* # of basic blocks that were removed. */
74 static int num_removed_blocks;
76 /* True if life data ok at present. */
77 static bool life_data_ok;
79 /* The post-dominator relation on the original block numbers. */
80 static sbitmap *post_dominators;
82 /* Forward references. */
83 static int count_bb_insns PARAMS ((basic_block));
84 static rtx first_active_insn PARAMS ((basic_block));
85 static int last_active_insn_p PARAMS ((basic_block, rtx));
86 static int seq_contains_jump PARAMS ((rtx));
88 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
89 static rtx cond_exec_get_condition PARAMS ((rtx));
90 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
91 basic_block, basic_block));
93 static rtx noce_get_condition PARAMS ((rtx, rtx *));
94 static int noce_operand_ok PARAMS ((rtx));
95 static int noce_process_if_block PARAMS ((basic_block, basic_block,
96 basic_block, basic_block));
98 static int process_if_block PARAMS ((basic_block, basic_block,
99 basic_block, basic_block));
100 static void merge_if_block PARAMS ((basic_block, basic_block,
101 basic_block, basic_block));
103 static int find_if_header PARAMS ((basic_block));
104 static int find_if_block PARAMS ((basic_block, edge, edge));
105 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
106 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
107 static int find_cond_trap PARAMS ((basic_block, edge, edge));
108 static rtx block_has_only_trap PARAMS ((basic_block));
109 static int find_memory PARAMS ((rtx *, void *));
110 static int dead_or_predicable PARAMS ((basic_block, basic_block,
111 basic_block, basic_block, int));
112 static void noce_emit_move_insn PARAMS ((rtx, rtx));
114 /* Count the number of non-jump active insns in BB. */
116 static int
117 count_bb_insns (bb)
118 basic_block bb;
120 int count = 0;
121 rtx insn = bb->head;
123 while (1)
125 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
126 count++;
128 if (insn == bb->end)
129 break;
130 insn = NEXT_INSN (insn);
133 return count;
136 /* Return the first non-jump active insn in the basic block. */
138 static rtx
139 first_active_insn (bb)
140 basic_block bb;
142 rtx insn = bb->head;
144 if (GET_CODE (insn) == CODE_LABEL)
146 if (insn == bb->end)
147 return NULL_RTX;
148 insn = NEXT_INSN (insn);
151 while (GET_CODE (insn) == NOTE)
153 if (insn == bb->end)
154 return NULL_RTX;
155 insn = NEXT_INSN (insn);
158 if (GET_CODE (insn) == JUMP_INSN)
159 return NULL_RTX;
161 return insn;
164 /* Return true if INSN is the last active non-jump insn in BB. */
166 static int
167 last_active_insn_p (bb, insn)
168 basic_block bb;
169 rtx insn;
173 if (insn == bb->end)
174 return TRUE;
175 insn = NEXT_INSN (insn);
177 while (GET_CODE (insn) == NOTE);
179 return GET_CODE (insn) == JUMP_INSN;
182 /* It is possible, especially when having dealt with multi-word
183 arithmetic, for the expanders to have emitted jumps. Search
184 through the sequence and return TRUE if a jump exists so that
185 we can abort the conversion. */
187 static int
188 seq_contains_jump (insn)
189 rtx insn;
191 while (insn)
193 if (GET_CODE (insn) == JUMP_INSN)
194 return 1;
195 insn = NEXT_INSN (insn);
197 return 0;
200 /* Go through a bunch of insns, converting them to conditional
201 execution format if possible. Return TRUE if all of the non-note
202 insns were processed. */
204 static int
205 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
206 rtx start; /* first insn to look at */
207 rtx end; /* last insn to look at */
208 rtx test; /* conditional execution test */
209 rtx prob_val; /* probability of branch taken. */
 210 int mod_ok; /* true if modifications are ok on the last insn. */
212 int must_be_last = FALSE;
213 rtx insn;
214 rtx pattern;
216 for (insn = start; ; insn = NEXT_INSN (insn))
218 if (GET_CODE (insn) == NOTE)
219 goto insn_done;
221 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
222 abort ();
224 /* Remove USE insns that get in the way. */
225 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
227 /* ??? Ug. Actually unlinking the thing is problematic,
228 given what we'd have to coordinate with our callers. */
229 PUT_CODE (insn, NOTE);
230 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
231 NOTE_SOURCE_FILE (insn) = 0;
232 goto insn_done;
235 /* Last insn wasn't last? */
236 if (must_be_last)
237 return FALSE;
239 if (modified_in_p (test, insn))
241 if (!mod_ok)
242 return FALSE;
243 must_be_last = TRUE;
246 /* Now build the conditional form of the instruction. */
247 pattern = PATTERN (insn);
249 /* If the machine needs to modify the insn being conditionally executed,
250 say for example to force a constant integer operand into a temp
251 register, do so here. */
252 #ifdef IFCVT_MODIFY_INSN
253 IFCVT_MODIFY_INSN (pattern, insn);
254 if (! pattern)
255 return FALSE;
256 #endif
258 validate_change (insn, &PATTERN (insn),
259 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
260 pattern), 1);
262 if (GET_CODE (insn) == CALL_INSN && prob_val)
263 validate_change (insn, &REG_NOTES (insn),
264 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
265 REG_NOTES (insn)), 1);
267 insn_done:
268 if (insn == end)
269 break;
272 return TRUE;
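/* For illustration, predicating a simple register move against a test
   such as (ne (reg 100) (const_int 0)) (register numbers hypothetical)
   rewrites

     (set (reg 101) (reg 102))

   into

     (cond_exec (ne (reg 100) (const_int 0)) (set (reg 101) (reg 102)))

   which is exactly what the gen_rtx_COND_EXEC call above constructs.  */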
275 /* Return the condition for a jump. Do not do any special processing. */
277 static rtx
278 cond_exec_get_condition (jump)
279 rtx jump;
281 rtx test_if, cond;
283 if (any_condjump_p (jump))
284 test_if = SET_SRC (pc_set (jump));
285 else
286 return NULL_RTX;
287 cond = XEXP (test_if, 0);
289 /* If this branches to JUMP_LABEL when the condition is false,
290 reverse the condition. */
291 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
292 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
294 enum rtx_code rev = reversed_comparison_code (cond, jump);
295 if (rev == UNKNOWN)
296 return NULL_RTX;
298 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
299 XEXP (cond, 1));
302 return cond;
305 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
306 to conditional execution. Return TRUE if we were successful at
 307 converting the block. */
309 static int
310 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
311 basic_block test_bb; /* Basic block test is in */
312 basic_block then_bb; /* Basic block for THEN block */
313 basic_block else_bb; /* Basic block for ELSE block */
314 basic_block join_bb; /* Basic block the join label is in */
316 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
317 rtx then_start; /* first insn in THEN block */
318 rtx then_end; /* last insn + 1 in THEN block */
319 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
320 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
321 int max; /* max # of insns to convert. */
322 int then_mod_ok; /* whether conditional mods are ok in THEN */
323 rtx true_expr; /* test for else block insns */
324 rtx false_expr; /* test for then block insns */
325 rtx true_prob_val; /* probability of else block */
326 rtx false_prob_val; /* probability of then block */
327 int n_insns;
328 enum rtx_code false_code;
330 /* Find the conditional jump to the ELSE or JOIN part, and isolate
331 the test. */
332 test_expr = cond_exec_get_condition (test_bb->end);
333 if (! test_expr)
334 return FALSE;
336 /* If the conditional jump is more than just a conditional jump,
337 then we can not do conditional execution conversion on this block. */
338 if (!onlyjump_p (test_bb->end))
339 return FALSE;
341 /* Collect the bounds of where we're to search. */
343 then_start = then_bb->head;
344 then_end = then_bb->end;
346 /* Skip a label heading THEN block. */
347 if (GET_CODE (then_start) == CODE_LABEL)
348 then_start = NEXT_INSN (then_start);
350 /* Skip a (use (const_int 0)) or branch as the final insn. */
351 if (GET_CODE (then_end) == INSN
352 && GET_CODE (PATTERN (then_end)) == USE
353 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
354 then_end = PREV_INSN (then_end);
355 else if (GET_CODE (then_end) == JUMP_INSN)
356 then_end = PREV_INSN (then_end);
358 if (else_bb)
360 /* Skip the ELSE block's label. */
361 else_start = NEXT_INSN (else_bb->head);
362 else_end = else_bb->end;
364 /* Skip a (use (const_int 0)) or branch as the final insn. */
365 if (GET_CODE (else_end) == INSN
366 && GET_CODE (PATTERN (else_end)) == USE
367 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
368 else_end = PREV_INSN (else_end);
369 else if (GET_CODE (else_end) == JUMP_INSN)
370 else_end = PREV_INSN (else_end);
373 /* How many instructions should we convert in total? */
374 n_insns = 0;
375 if (else_bb)
377 max = 2 * MAX_CONDITIONAL_EXECUTE;
378 n_insns = count_bb_insns (else_bb);
380 else
381 max = MAX_CONDITIONAL_EXECUTE;
382 n_insns += count_bb_insns (then_bb);
383 if (n_insns > max)
384 return FALSE;
386 /* Map test_expr/test_jump into the appropriate MD tests to use on
387 the conditionally executed code. */
389 true_expr = test_expr;
391 false_code = reversed_comparison_code (true_expr, test_bb->end);
392 if (false_code != UNKNOWN)
393 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
394 XEXP (true_expr, 0), XEXP (true_expr, 1));
395 else
396 false_expr = NULL_RTX;
398 #ifdef IFCVT_MODIFY_TESTS
399 /* If the machine description needs to modify the tests, such as setting a
400 conditional execution register from a comparison, it can do so here. */
401 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
402 join_bb);
404 /* See if the conversion failed */
405 if (!true_expr || !false_expr)
406 goto fail;
407 #endif
409 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
410 if (true_prob_val)
412 true_prob_val = XEXP (true_prob_val, 0);
413 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
415 else
416 false_prob_val = NULL_RTX;
418 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
 419 on the THEN block. */
420 then_mod_ok = (else_bb == NULL_BLOCK);
422 /* Go through the THEN and ELSE blocks converting the insns if possible
423 to conditional execution. */
425 if (then_end
426 && (! false_expr
427 || ! cond_exec_process_insns (then_start, then_end, false_expr,
428 false_prob_val, then_mod_ok)))
429 goto fail;
431 if (else_bb
432 && ! cond_exec_process_insns (else_start, else_end,
433 true_expr, true_prob_val, TRUE))
434 goto fail;
436 if (! apply_change_group ())
437 return FALSE;
439 #ifdef IFCVT_MODIFY_FINAL
440 /* Do any machine dependent final modifications */
441 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
442 #endif
444 /* Conversion succeeded. */
445 if (rtl_dump_file)
446 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
447 n_insns, (n_insns == 1) ? " was" : "s were");
449 /* Merge the blocks! */
450 merge_if_block (test_bb, then_bb, else_bb, join_bb);
451 return TRUE;
453 fail:
454 #ifdef IFCVT_MODIFY_CANCEL
455 /* Cancel any machine dependent changes. */
456 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
457 #endif
459 cancel_changes (0);
460 return FALSE;
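/* A source-level sketch of the effect on a predicated target (ARM-style
   mnemonics, purely illustrative):

     if (a == 0)              cmp   a, #0
       x = b + c;      =>     addeq x, b, c
     else
       x = b - c;             subne x, b, c

   Both arms survive, each insn guarded by the appropriate condition, and
   the branch and join are removed by merge_if_block.  */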
463 /* Used by noce_process_if_block to communicate with its subroutines.
465 The subroutines know that A and B may be evaluated freely. They
466 know that X is a register. They should insert new instructions
467 before cond_earliest. */
469 struct noce_if_info
471 basic_block test_bb;
472 rtx insn_a, insn_b;
473 rtx x, a, b;
474 rtx jump, cond, cond_earliest;
477 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
478 rtx, int, int));
479 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
480 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
481 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
482 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
483 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
484 rtx, enum rtx_code, rtx,
485 rtx, rtx, rtx));
486 static int noce_try_cmove PARAMS ((struct noce_if_info *));
487 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
488 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
489 rtx, rtx *));
490 static int noce_try_minmax PARAMS ((struct noce_if_info *));
491 static int noce_try_abs PARAMS ((struct noce_if_info *));
493 /* Helper function for noce_try_store_flag*. */
495 static rtx
496 noce_emit_store_flag (if_info, x, reversep, normalize)
497 struct noce_if_info *if_info;
498 rtx x;
499 int reversep, normalize;
501 rtx cond = if_info->cond;
502 int cond_complex;
503 enum rtx_code code;
505 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
506 || ! general_operand (XEXP (cond, 1), VOIDmode));
508 /* If earliest == jump, or when the condition is complex, try to
509 build the store_flag insn directly. */
511 if (cond_complex)
512 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
514 if (reversep)
515 code = reversed_comparison_code (cond, if_info->jump);
516 else
517 code = GET_CODE (cond);
519 if ((if_info->cond_earliest == if_info->jump || cond_complex)
520 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
522 rtx tmp;
524 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
525 XEXP (cond, 1));
526 tmp = gen_rtx_SET (VOIDmode, x, tmp);
528 start_sequence ();
529 tmp = emit_insn (tmp);
531 if (recog_memoized (tmp) >= 0)
533 tmp = get_insns ();
534 end_sequence ();
535 emit_insns (tmp);
537 if_info->cond_earliest = if_info->jump;
539 return x;
542 end_sequence ();
545 /* Don't even try if the comparison operands are weird. */
546 if (cond_complex)
547 return NULL_RTX;
549 return emit_store_flag (x, code, XEXP (cond, 0),
550 XEXP (cond, 1), VOIDmode,
551 (code == LTU || code == LEU
552 || code == GEU || code == GTU), normalize);
 555 /* Emit an instruction to move Y into X, handling a STRICT_LOW_PART destination X specially. */
556 static void
557 noce_emit_move_insn (x, y)
558 rtx x, y;
560 enum machine_mode outmode, inmode;
561 rtx outer, inner;
562 int bitpos;
564 if (GET_CODE (x) != STRICT_LOW_PART)
566 emit_move_insn (x, y);
567 return;
570 outer = XEXP (x, 0);
571 inner = XEXP (outer, 0);
572 outmode = GET_MODE (outer);
573 inmode = GET_MODE (inner);
574 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
575 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
576 GET_MODE_BITSIZE (inmode));
579 /* Convert "if (test) x = 1; else x = 0".
581 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
582 tried in noce_try_store_flag_constants after noce_try_cmove has had
583 a go at the conversion. */
585 static int
586 noce_try_store_flag (if_info)
587 struct noce_if_info *if_info;
589 int reversep;
590 rtx target, seq;
592 if (GET_CODE (if_info->b) == CONST_INT
593 && INTVAL (if_info->b) == STORE_FLAG_VALUE
594 && if_info->a == const0_rtx)
595 reversep = 0;
596 else if (if_info->b == const0_rtx
597 && GET_CODE (if_info->a) == CONST_INT
598 && INTVAL (if_info->a) == STORE_FLAG_VALUE
599 && (reversed_comparison_code (if_info->cond, if_info->jump)
600 != UNKNOWN))
601 reversep = 1;
602 else
603 return FALSE;
605 start_sequence ();
607 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
608 if (target)
610 if (target != if_info->x)
611 noce_emit_move_insn (if_info->x, target);
613 seq = get_insns ();
614 end_sequence ();
615 emit_insns_before (seq, if_info->jump);
617 return TRUE;
619 else
621 end_sequence ();
622 return FALSE;
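/* A minimal illustration, assuming STORE_FLAG_VALUE == 1 on the target:

     if (a < b) x = 1; else x = 0;   =>   x = (a < b);

   i.e. a single store-flag insn replaces the branch.  */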
626 /* Convert "if (test) x = a; else x = b", for A and B constant. */
628 static int
629 noce_try_store_flag_constants (if_info)
630 struct noce_if_info *if_info;
632 rtx target, seq;
633 int reversep;
634 HOST_WIDE_INT itrue, ifalse, diff, tmp;
635 int normalize, can_reverse;
636 enum machine_mode mode;
638 if (! no_new_pseudos
639 && GET_CODE (if_info->a) == CONST_INT
640 && GET_CODE (if_info->b) == CONST_INT)
642 mode = GET_MODE (if_info->x);
643 ifalse = INTVAL (if_info->a);
644 itrue = INTVAL (if_info->b);
646 /* Make sure we can represent the difference between the two values. */
647 if ((itrue - ifalse > 0)
648 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
649 return FALSE;
651 diff = trunc_int_for_mode (itrue - ifalse, mode);
653 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
654 != UNKNOWN);
656 reversep = 0;
657 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
658 normalize = 0;
659 else if (ifalse == 0 && exact_log2 (itrue) >= 0
660 && (STORE_FLAG_VALUE == 1
661 || BRANCH_COST >= 2))
662 normalize = 1;
663 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
664 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
665 normalize = 1, reversep = 1;
666 else if (itrue == -1
667 && (STORE_FLAG_VALUE == -1
668 || BRANCH_COST >= 2))
669 normalize = -1;
670 else if (ifalse == -1 && can_reverse
671 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
672 normalize = -1, reversep = 1;
673 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
674 || BRANCH_COST >= 3)
675 normalize = -1;
676 else
677 return FALSE;
679 if (reversep)
681 tmp = itrue; itrue = ifalse; ifalse = tmp;
682 diff = trunc_int_for_mode (-diff, mode);
685 start_sequence ();
686 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
687 if (! target)
689 end_sequence ();
690 return FALSE;
693 /* if (test) x = 3; else x = 4;
694 => x = 3 + (test == 0); */
695 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
697 target = expand_simple_binop (mode,
698 (diff == STORE_FLAG_VALUE
699 ? PLUS : MINUS),
700 GEN_INT (ifalse), target, if_info->x, 0,
701 OPTAB_WIDEN);
704 /* if (test) x = 8; else x = 0;
705 => x = (test != 0) << 3; */
706 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
708 target = expand_simple_binop (mode, ASHIFT,
709 target, GEN_INT (tmp), if_info->x, 0,
710 OPTAB_WIDEN);
713 /* if (test) x = -1; else x = b;
714 => x = -(test != 0) | b; */
715 else if (itrue == -1)
717 target = expand_simple_binop (mode, IOR,
718 target, GEN_INT (ifalse), if_info->x, 0,
719 OPTAB_WIDEN);
722 /* if (test) x = a; else x = b;
 723 => x = (-(test == 0) & (b - a)) + a; */
724 else
726 target = expand_simple_binop (mode, AND,
727 target, GEN_INT (diff), if_info->x, 0,
728 OPTAB_WIDEN);
729 if (target)
730 target = expand_simple_binop (mode, PLUS,
731 target, GEN_INT (ifalse),
732 if_info->x, 0, OPTAB_WIDEN);
735 if (! target)
737 end_sequence ();
738 return FALSE;
741 if (target != if_info->x)
742 noce_emit_move_insn (if_info->x, target);
744 seq = get_insns ();
745 end_sequence ();
747 if (seq_contains_jump (seq))
748 return FALSE;
750 emit_insns_before (seq, if_info->jump);
752 return TRUE;
755 return FALSE;
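/* Worked example of the general case above, on a target where it is
   taken (e.g. BRANCH_COST >= 3), with THEN value 5 and ELSE value 7:

     if (test) x = 5; else x = 7;   =>   x = (-(test == 0) & 2) + 5;

   The mask -(test == 0) is 0 or -1, selecting either 0 or the
   difference 2 to add to the THEN value.  */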
758 /* Convert "if (test) foo++" into "foo += (test != 0)", and
759 similarly for "foo--". */
761 static int
762 noce_try_store_flag_inc (if_info)
763 struct noce_if_info *if_info;
765 rtx target, seq;
766 int subtract, normalize;
768 if (! no_new_pseudos
769 && (BRANCH_COST >= 2
770 || HAVE_incscc
771 || HAVE_decscc)
772 /* Should be no `else' case to worry about. */
773 && if_info->b == if_info->x
774 && GET_CODE (if_info->a) == PLUS
775 && (XEXP (if_info->a, 1) == const1_rtx
776 || XEXP (if_info->a, 1) == constm1_rtx)
777 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
778 && (reversed_comparison_code (if_info->cond, if_info->jump)
779 != UNKNOWN))
781 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
782 subtract = 0, normalize = 0;
783 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
784 subtract = 1, normalize = 0;
785 else
786 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
788 start_sequence ();
790 target = noce_emit_store_flag (if_info,
791 gen_reg_rtx (GET_MODE (if_info->x)),
792 1, normalize);
794 if (target)
795 target = expand_simple_binop (GET_MODE (if_info->x),
796 subtract ? MINUS : PLUS,
797 if_info->x, target, if_info->x,
798 0, OPTAB_WIDEN);
799 if (target)
801 if (target != if_info->x)
802 noce_emit_move_insn (if_info->x, target);
804 seq = get_insns ();
805 end_sequence ();
807 if (seq_contains_jump (seq))
808 return FALSE;
810 emit_insns_before (seq, if_info->jump);
812 return TRUE;
815 end_sequence ();
818 return FALSE;
821 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
823 static int
824 noce_try_store_flag_mask (if_info)
825 struct noce_if_info *if_info;
827 rtx target, seq;
828 int reversep;
830 reversep = 0;
831 if (! no_new_pseudos
832 && (BRANCH_COST >= 2
833 || STORE_FLAG_VALUE == -1)
834 && ((if_info->a == const0_rtx
835 && rtx_equal_p (if_info->b, if_info->x))
836 || ((reversep = (reversed_comparison_code (if_info->cond,
837 if_info->jump)
838 != UNKNOWN))
839 && if_info->b == const0_rtx
840 && rtx_equal_p (if_info->a, if_info->x))))
842 start_sequence ();
843 target = noce_emit_store_flag (if_info,
844 gen_reg_rtx (GET_MODE (if_info->x)),
845 reversep, -1);
846 if (target)
847 target = expand_simple_binop (GET_MODE (if_info->x), AND,
848 if_info->x, target, if_info->x, 0,
849 OPTAB_WIDEN);
851 if (target)
853 if (target != if_info->x)
854 noce_emit_move_insn (if_info->x, target);
856 seq = get_insns ();
857 end_sequence ();
859 if (seq_contains_jump (seq))
860 return FALSE;
862 emit_insns_before (seq, if_info->jump);
864 return TRUE;
867 end_sequence ();
870 return FALSE;
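/* To see why the transform above is correct: -(test == 0) is all ones
   when the conditional store would have been skipped, so the AND leaves
   X unchanged, and it is zero when the store would have executed, so
   the AND clears X -- the same effect, with no branch.  */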
873 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
875 static rtx
876 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
877 struct noce_if_info *if_info;
878 rtx x, cmp_a, cmp_b, vfalse, vtrue;
879 enum rtx_code code;
881 /* If earliest == jump, try to build the cmove insn directly.
882 This is helpful when combine has created some complex condition
883 (like for alpha's cmovlbs) that we can't hope to regenerate
884 through the normal interface. */
886 if (if_info->cond_earliest == if_info->jump)
888 rtx tmp;
890 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
891 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
892 tmp = gen_rtx_SET (VOIDmode, x, tmp);
894 start_sequence ();
895 tmp = emit_insn (tmp);
897 if (recog_memoized (tmp) >= 0)
899 tmp = get_insns ();
900 end_sequence ();
901 emit_insns (tmp);
903 return x;
906 end_sequence ();
909 /* Don't even try if the comparison operands are weird. */
910 if (! general_operand (cmp_a, GET_MODE (cmp_a))
911 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
912 return NULL_RTX;
914 #if HAVE_conditional_move
915 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
916 vtrue, vfalse, GET_MODE (x),
917 (code == LTU || code == GEU
918 || code == LEU || code == GTU));
919 #else
920 /* We'll never get here, as noce_process_if_block doesn't call the
921 functions involved. Ifdef code, however, should be discouraged
922 because it leads to typos in the code not selected. However,
923 emit_conditional_move won't exist either. */
924 return NULL_RTX;
925 #endif
928 /* Try only simple constants and registers here. More complex cases
929 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
930 has had a go at it. */
932 static int
933 noce_try_cmove (if_info)
934 struct noce_if_info *if_info;
936 enum rtx_code code;
937 rtx target, seq;
939 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
940 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
942 start_sequence ();
944 code = GET_CODE (if_info->cond);
945 target = noce_emit_cmove (if_info, if_info->x, code,
946 XEXP (if_info->cond, 0),
947 XEXP (if_info->cond, 1),
948 if_info->a, if_info->b);
950 if (target)
952 if (target != if_info->x)
953 noce_emit_move_insn (if_info->x, target);
955 seq = get_insns ();
956 end_sequence ();
957 emit_insns_before (seq, if_info->jump);
958 return TRUE;
960 else
962 end_sequence ();
963 return FALSE;
967 return FALSE;
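/* Illustration on a target providing conditional moves:

     if (a < b) x = c; else x = d;   =>   x = (a < b) ? c : d;

   emitted as a single conditional-move insn via emit_conditional_move,
   so neither arm needs a branch.  */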
970 /* Try more complex cases involving conditional_move. */
972 static int
973 noce_try_cmove_arith (if_info)
974 struct noce_if_info *if_info;
976 rtx a = if_info->a;
977 rtx b = if_info->b;
978 rtx x = if_info->x;
979 rtx insn_a, insn_b;
980 rtx tmp, target;
981 int is_mem = 0;
982 enum rtx_code code;
984 /* A conditional move from two memory sources is equivalent to a
985 conditional on their addresses followed by a load. Don't do this
986 early because it'll screw alias analysis. Note that we've
987 already checked for no side effects. */
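/* Illustration of that case (names purely illustrative):

     if (test) x = *p; else x = *q;
  =>
     tmp = test ? p : q;
     x = *tmp;

   i.e. the conditional move selects between the two addresses and a
   single unconditional load follows; the MEM flags and alignment of
   the new load are fixed up further below.  */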
988 if (! no_new_pseudos && cse_not_expected
989 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
990 && BRANCH_COST >= 5)
992 a = XEXP (a, 0);
993 b = XEXP (b, 0);
994 x = gen_reg_rtx (Pmode);
995 is_mem = 1;
998 /* ??? We could handle this if we knew that a load from A or B could
999 not fault. This is also true if we've already loaded
1000 from the address along the path from ENTRY. */
1001 else if (may_trap_p (a) || may_trap_p (b))
1002 return FALSE;
1004 /* if (test) x = a + b; else x = c - d;
1005 => y = a + b;
1006 x = c - d;
1007 if (test)
1008 x = y;
1011 code = GET_CODE (if_info->cond);
1012 insn_a = if_info->insn_a;
1013 insn_b = if_info->insn_b;
1015 /* Possibly rearrange operands to make things come out more natural. */
1016 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1018 int reversep = 0;
1019 if (rtx_equal_p (b, x))
1020 reversep = 1;
1021 else if (general_operand (b, GET_MODE (b)))
1022 reversep = 1;
1024 if (reversep)
1026 code = reversed_comparison_code (if_info->cond, if_info->jump);
1027 tmp = a, a = b, b = tmp;
1028 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1032 start_sequence ();
1034 /* If either operand is complex, load it into a register first.
1035 The best way to do this is to copy the original insn. In this
1036 way we preserve any clobbers etc that the insn may have had.
1037 This is of course not possible in the IS_MEM case. */
1038 if (! general_operand (a, GET_MODE (a)))
1040 rtx set;
1042 if (no_new_pseudos)
1043 goto end_seq_and_fail;
1045 if (is_mem)
1047 tmp = gen_reg_rtx (GET_MODE (a));
1048 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1050 else if (! insn_a)
1051 goto end_seq_and_fail;
1052 else
1054 a = gen_reg_rtx (GET_MODE (a));
1055 tmp = copy_rtx (insn_a);
1056 set = single_set (tmp);
1057 SET_DEST (set) = a;
1058 tmp = emit_insn (PATTERN (tmp));
1060 if (recog_memoized (tmp) < 0)
1061 goto end_seq_and_fail;
1063 if (! general_operand (b, GET_MODE (b)))
1065 rtx set;
1067 if (no_new_pseudos)
1068 goto end_seq_and_fail;
1070 if (is_mem)
1072 tmp = gen_reg_rtx (GET_MODE (b));
1073 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1075 else if (! insn_b)
1076 goto end_seq_and_fail;
1077 else
1079 b = gen_reg_rtx (GET_MODE (b));
1080 tmp = copy_rtx (insn_b);
1081 set = single_set (tmp);
1082 SET_DEST (set) = b;
1083 tmp = emit_insn (PATTERN (tmp));
1085 if (recog_memoized (tmp) < 0)
1086 goto end_seq_and_fail;
1089 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1090 XEXP (if_info->cond, 1), a, b);
1092 if (! target)
1093 goto end_seq_and_fail;
 1095 /* If we're handling the memory case from above, emit the load now. */
1096 if (is_mem)
1098 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1100 /* Copy over flags as appropriate. */
1101 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1102 MEM_VOLATILE_P (tmp) = 1;
1103 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1104 MEM_IN_STRUCT_P (tmp) = 1;
1105 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1106 MEM_SCALAR_P (tmp) = 1;
1107 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1108 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1109 set_mem_align (tmp,
1110 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1112 noce_emit_move_insn (if_info->x, tmp);
1114 else if (target != x)
1115 noce_emit_move_insn (x, target);
1117 tmp = get_insns ();
1118 end_sequence ();
1119 emit_insns_before (tmp, if_info->jump);
1120 return TRUE;
1122 end_seq_and_fail:
1123 end_sequence ();
1124 return FALSE;
1127 /* For most cases, the simplified condition we found is the best
1128 choice, but this is not the case for the min/max/abs transforms.
1129 For these we wish to know that it is A or B in the condition. */
1131 static rtx
1132 noce_get_alt_condition (if_info, target, earliest)
1133 struct noce_if_info *if_info;
1134 rtx target;
1135 rtx *earliest;
1137 rtx cond, set, insn;
1138 int reverse;
1140 /* If target is already mentioned in the known condition, return it. */
1141 if (reg_mentioned_p (target, if_info->cond))
1143 *earliest = if_info->cond_earliest;
1144 return if_info->cond;
1147 set = pc_set (if_info->jump);
1148 cond = XEXP (SET_SRC (set), 0);
1149 reverse
1150 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1151 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1153 /* If we're looking for a constant, try to make the conditional
1154 have that constant in it. There are two reasons why it may
1155 not have the constant we want:
1157 1. GCC may have needed to put the constant in a register, because
1158 the target can't compare directly against that constant. For
1159 this case, we look for a SET immediately before the comparison
1160 that puts a constant in that register.
1162 2. GCC may have canonicalized the conditional, for example
1163 replacing "if x < 4" with "if x <= 3". We can undo that (or
1164 make equivalent types of changes) to get the constants we need
1165 if they're off by one in the right direction. */
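/* For instance, if we are looking for the constant 4 but the jump tests
   (le x 3), the LE case below rewrites it as (lt x 4), which is
   equivalent for integers and mentions the constant we need.  */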
1167 if (GET_CODE (target) == CONST_INT)
1169 enum rtx_code code = GET_CODE (if_info->cond);
1170 rtx op_a = XEXP (if_info->cond, 0);
1171 rtx op_b = XEXP (if_info->cond, 1);
1172 rtx prev_insn;
1174 /* First, look to see if we put a constant in a register. */
1175 prev_insn = PREV_INSN (if_info->cond_earliest);
1176 if (prev_insn
1177 && INSN_P (prev_insn)
1178 && GET_CODE (PATTERN (prev_insn)) == SET)
1180 rtx src = find_reg_equal_equiv_note (prev_insn);
1181 if (!src)
1182 src = SET_SRC (PATTERN (prev_insn));
1183 if (GET_CODE (src) == CONST_INT)
1185 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1186 op_a = src;
1187 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1188 op_b = src;
1190 if (GET_CODE (op_a) == CONST_INT)
1192 rtx tmp = op_a;
1193 op_a = op_b;
1194 op_b = tmp;
1195 code = swap_condition (code);
1200 /* Now, look to see if we can get the right constant by
1201 adjusting the conditional. */
1202 if (GET_CODE (op_b) == CONST_INT)
1204 HOST_WIDE_INT desired_val = INTVAL (target);
1205 HOST_WIDE_INT actual_val = INTVAL (op_b);
1207 switch (code)
1209 case LT:
1210 if (actual_val == desired_val + 1)
1212 code = LE;
1213 op_b = GEN_INT (desired_val);
1215 break;
1216 case LE:
1217 if (actual_val == desired_val - 1)
1219 code = LT;
1220 op_b = GEN_INT (desired_val);
1222 break;
1223 case GT:
1224 if (actual_val == desired_val - 1)
1226 code = GE;
1227 op_b = GEN_INT (desired_val);
1229 break;
1230 case GE:
1231 if (actual_val == desired_val + 1)
1233 code = GT;
1234 op_b = GEN_INT (desired_val);
1236 break;
1237 default:
1238 break;
1242 /* If we made any changes, generate a new conditional that is
1243 equivalent to what we started with, but has the right
1244 constants in it. */
1245 if (code != GET_CODE (if_info->cond)
1246 || op_a != XEXP (if_info->cond, 0)
1247 || op_b != XEXP (if_info->cond, 1))
1249 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1250 *earliest = if_info->cond_earliest;
1251 return cond;
1255 cond = canonicalize_condition (if_info->jump, cond, reverse,
1256 earliest, target);
1257 if (! cond || ! reg_mentioned_p (target, cond))
1258 return NULL;
1260 /* We almost certainly searched back to a different place.
1261 Need to re-verify correct lifetimes. */
1263 /* X may not be mentioned in the range (cond_earliest, jump]. */
1264 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1265 if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
1266 return NULL;
1268 /* A and B may not be modified in the range [cond_earliest, jump). */
1269 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1270 if (INSN_P (insn)
1271 && (modified_in_p (if_info->a, insn)
1272 || modified_in_p (if_info->b, insn)))
1273 return NULL;
1275 return cond;
1278 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1280 static int
1281 noce_try_minmax (if_info)
1282 struct noce_if_info *if_info;
1284 rtx cond, earliest, target, seq;
1285 enum rtx_code code, op;
1286 int unsignedp;
1288 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1289 if (no_new_pseudos)
1290 return FALSE;
1292 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1293 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1294 to get the target to tell us... */
1295 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1296 || HONOR_NANS (GET_MODE (if_info->x)))
1297 return FALSE;
1299 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1300 if (!cond)
1301 return FALSE;
1303 /* Verify the condition is of the form we expect, and canonicalize
1304 the comparison code. */
1305 code = GET_CODE (cond);
1306 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1308 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1309 return FALSE;
1311 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1313 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1314 return FALSE;
1315 code = swap_condition (code);
1317 else
1318 return FALSE;
1320 /* Determine what sort of operation this is. Note that the code is for
 1321 a taken branch, so the code->operation mapping appears backwards. */
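/* For example, for the source-level "if (a < b) x = a; else x = b;"
   the branch to the ELSE block is taken when a >= b, so the condition
   seen here is GE, and the GE case below correctly selects SMIN,
   i.e. x = min (a, b).  */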
1322 switch (code)
1324 case LT:
1325 case LE:
1326 case UNLT:
1327 case UNLE:
1328 op = SMAX;
1329 unsignedp = 0;
1330 break;
1331 case GT:
1332 case GE:
1333 case UNGT:
1334 case UNGE:
1335 op = SMIN;
1336 unsignedp = 0;
1337 break;
1338 case LTU:
1339 case LEU:
1340 op = UMAX;
1341 unsignedp = 1;
1342 break;
1343 case GTU:
1344 case GEU:
1345 op = UMIN;
1346 unsignedp = 1;
1347 break;
1348 default:
1349 return FALSE;
1352 start_sequence ();
1354 target = expand_simple_binop (GET_MODE (if_info->x), op,
1355 if_info->a, if_info->b,
1356 if_info->x, unsignedp, OPTAB_WIDEN);
1357 if (! target)
1359 end_sequence ();
1360 return FALSE;
1362 if (target != if_info->x)
1363 noce_emit_move_insn (if_info->x, target);
1365 seq = get_insns ();
1366 end_sequence ();
1368 if (seq_contains_jump (seq))
1369 return FALSE;
1371 emit_insns_before (seq, if_info->jump);
1372 if_info->cond = cond;
1373 if_info->cond_earliest = earliest;
1375 return TRUE;
1378 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1380 static int
1381 noce_try_abs (if_info)
1382 struct noce_if_info *if_info;
1384 rtx cond, earliest, target, seq, a, b, c;
1385 int negate;
1387 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1388 if (no_new_pseudos)
1389 return FALSE;
1391 /* Recognize A and B as constituting an ABS or NABS. */
1392 a = if_info->a;
1393 b = if_info->b;
1394 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1395 negate = 0;
1396 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1398 c = a; a = b; b = c;
1399 negate = 1;
1401 else
1402 return FALSE;
1404 cond = noce_get_alt_condition (if_info, b, &earliest);
1405 if (!cond)
1406 return FALSE;
1408 /* Verify the condition is of the form we expect. */
1409 if (rtx_equal_p (XEXP (cond, 0), b))
1410 c = XEXP (cond, 1);
1411 else if (rtx_equal_p (XEXP (cond, 1), b))
1412 c = XEXP (cond, 0);
1413 else
1414 return FALSE;
1416 /* Verify that C is zero. Search backward through the block for
1417 a REG_EQUAL note if necessary. */
1418 if (REG_P (c))
1420 rtx insn, note = NULL;
1421 for (insn = earliest;
1422 insn != if_info->test_bb->head;
1423 insn = PREV_INSN (insn))
1424 if (INSN_P (insn)
1425 && ((note = find_reg_note (insn, REG_EQUAL, c))
1426 || (note = find_reg_note (insn, REG_EQUIV, c))))
1427 break;
1428 if (! note)
1429 return FALSE;
1430 c = XEXP (note, 0);
1432 if (GET_CODE (c) == MEM
1433 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1434 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1435 c = get_pool_constant (XEXP (c, 0));
1437 /* Work around funny ideas get_condition has wrt canonicalization.
1438 Note that these rtx constants are known to be CONST_INT, and
1439 therefore imply integer comparisons. */
1440 if (c == constm1_rtx && GET_CODE (cond) == GT)
1442 else if (c == const1_rtx && GET_CODE (cond) == LT)
1444 else if (c != CONST0_RTX (GET_MODE (b)))
1445 return FALSE;
1447 /* Determine what sort of operation this is. */
1448 switch (GET_CODE (cond))
1450 case LT:
1451 case LE:
1452 case UNLT:
1453 case UNLE:
1454 negate = !negate;
1455 break;
1456 case GT:
1457 case GE:
1458 case UNGT:
1459 case UNGE:
1460 break;
1461 default:
1462 return FALSE;
1465 start_sequence ();
1467 target = expand_simple_unop (GET_MODE (if_info->x), ABS, b, if_info->x, 0);
 1469 /* ??? It's a quandary whether cmove would be better here, especially
1470 for integers. Perhaps combine will clean things up. */
1471 if (target && negate)
1472 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1474 if (! target)
1476 end_sequence ();
1477 return FALSE;
1480 if (target != if_info->x)
1481 noce_emit_move_insn (if_info->x, target);
1483 seq = get_insns ();
1484 end_sequence ();
1486 if (seq_contains_jump (seq))
1487 return FALSE;
1489 emit_insns_before (seq, if_info->jump);
1490 if_info->cond = cond;
1491 if_info->cond_earliest = earliest;
1493 return TRUE;
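/* Illustration: for "if (a < 0) x = -a; else x = a;" the THEN arm is
   the negation, so NEGATE starts at 0 and B is the plain operand; the
   branch to the ELSE block is taken when a >= 0, so the GE case above
   leaves NEGATE alone and a single abs insn is emitted.  When the
   comparison comes out as LT/LE instead, NEGATE is flipped so the
   result still matches the source.  */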
1496 /* Look for the condition for the jump first. We'd prefer to avoid
1497 get_condition if we can -- it tries to look back for the contents
1498 of an original compare. On targets that use normal integers for
1499 comparisons, e.g. alpha, this is wasteful. */
1501 static rtx
1502 noce_get_condition (jump, earliest)
1503 rtx jump;
1504 rtx *earliest;
1506 rtx cond;
1507 rtx set;
1509 /* If the condition variable is a register and is MODE_INT, accept it.
1510 Otherwise, fall back on get_condition. */
1512 if (! any_condjump_p (jump))
1513 return NULL_RTX;
1515 set = pc_set (jump);
1517 cond = XEXP (SET_SRC (set), 0);
1518 if (GET_CODE (XEXP (cond, 0)) == REG
1519 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1521 *earliest = jump;
1523 /* If this branches to JUMP_LABEL when the condition is false,
1524 reverse the condition. */
1525 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1526 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1527 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1528 GET_MODE (cond), XEXP (cond, 0),
1529 XEXP (cond, 1));
1531 else
1532 cond = get_condition (jump, earliest);
1534 return cond;
1537 /* Return true if OP is ok for if-then-else processing. */
1539 static int
1540 noce_operand_ok (op)
1541 rtx op;
1543 /* We special-case memories, so handle any of them with
1544 no address side effects. */
1545 if (GET_CODE (op) == MEM)
1546 return ! side_effects_p (XEXP (op, 0));
1548 if (side_effects_p (op))
1549 return FALSE;
1551 return ! may_trap_p (op);
1554 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1555 without using conditional execution. Return TRUE if we were
 1556 successful at converting the block. */
1558 static int
1559 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1560 basic_block test_bb; /* Basic block test is in */
1561 basic_block then_bb; /* Basic block for THEN block */
1562 basic_block else_bb; /* Basic block for ELSE block */
1563 basic_block join_bb; /* Basic block the join label is in */
1565 /* We're looking for patterns of the form
1567 (1) if (...) x = a; else x = b;
1568 (2) x = b; if (...) x = a;
1569 (3) if (...) x = a; // as if with an initial x = x.
1571 The later patterns require jumps to be more expensive.
1573 ??? For future expansion, look for multiple X in such patterns. */
1575 struct noce_if_info if_info;
1576 rtx insn_a, insn_b;
1577 rtx set_a, set_b;
1578 rtx orig_x, x, a, b;
1579 rtx jump, cond, insn;
1581 /* If this is not a standard conditional jump, we can't parse it. */
1582 jump = test_bb->end;
1583 cond = noce_get_condition (jump, &if_info.cond_earliest);
1584 if (! cond)
1585 return FALSE;
1587 /* If the conditional jump is more than just a conditional jump,
1588 then we can not do if-conversion on this block. */
1589 if (! onlyjump_p (jump))
1590 return FALSE;
1592 /* We must be comparing objects whose modes imply the size. */
1593 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1594 return FALSE;
1596 /* Look for one of the potential sets. */
1597 insn_a = first_active_insn (then_bb);
1598 if (! insn_a
1599 || ! last_active_insn_p (then_bb, insn_a)
1600 || (set_a = single_set (insn_a)) == NULL_RTX)
1601 return FALSE;
1603 x = SET_DEST (set_a);
1604 a = SET_SRC (set_a);
1606 /* Look for the other potential set. Make sure we've got equivalent
1607 destinations. */
1608 /* ??? This is overconservative. Storing to two different mems is
1609 as easy as conditionally computing the address. Storing to a
1610 single mem merely requires a scratch memory to use as one of the
1611 destination addresses; often the memory immediately below the
1612 stack pointer is available for this. */
1613 set_b = NULL_RTX;
1614 if (else_bb)
1616 insn_b = first_active_insn (else_bb);
1617 if (! insn_b
1618 || ! last_active_insn_p (else_bb, insn_b)
1619 || (set_b = single_set (insn_b)) == NULL_RTX
1620 || ! rtx_equal_p (x, SET_DEST (set_b)))
1621 return FALSE;
1623 else
1625 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1626 if (! insn_b
1627 || GET_CODE (insn_b) != INSN
1628 || (set_b = single_set (insn_b)) == NULL_RTX
1629 || ! rtx_equal_p (x, SET_DEST (set_b))
1630 || reg_mentioned_p (x, cond)
1631 || reg_mentioned_p (x, a)
1632 || reg_mentioned_p (x, SET_SRC (set_b)))
1633 insn_b = set_b = NULL_RTX;
1635 b = (set_b ? SET_SRC (set_b) : x);
1637 /* X may not be mentioned in the range (cond_earliest, jump]. */
1638 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1639 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1640 return FALSE;
1642 /* A and B may not be modified in the range [cond_earliest, jump). */
1643 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1644 if (INSN_P (insn)
1645 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1646 return FALSE;
1648 /* Only operate on register destinations, and even then avoid extending
1649 the lifetime of hard registers on small register class machines. */
1650 orig_x = x;
1651 if (GET_CODE (x) != REG
1652 || (SMALL_REGISTER_CLASSES
1653 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1655 if (no_new_pseudos)
1656 return FALSE;
1657 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1658 ? XEXP (x, 0) : x));
1661 /* Don't operate on sources that may trap or are volatile. */
1662 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1663 return FALSE;
1665 /* Set up the info block for our subroutines. */
1666 if_info.test_bb = test_bb;
1667 if_info.cond = cond;
1668 if_info.jump = jump;
1669 if_info.insn_a = insn_a;
1670 if_info.insn_b = insn_b;
1671 if_info.x = x;
1672 if_info.a = a;
1673 if_info.b = b;
1675 /* Try optimizations in some approximation of a useful order. */
1676 /* ??? Should first look to see if X is live incoming at all. If it
1677 isn't, we don't need anything but an unconditional set. */
1679 /* Look and see if A and B are really the same. Avoid creating silly
1680 cmove constructs that no one will fix up later. */
1681 if (rtx_equal_p (a, b))
1683 /* If we have an INSN_B, we don't have to create any new rtl. Just
1684 move the instruction that we already have. If we don't have an
1685 INSN_B, that means that A == X, and we've got a noop move. In
1686 that case don't do anything and let the code below delete INSN_A. */
1687 if (insn_b && else_bb)
1689 rtx note;
1691 if (else_bb && insn_b == else_bb->end)
1692 else_bb->end = PREV_INSN (insn_b);
1693 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1695 /* If there was a REG_EQUAL note, delete it since it may have been
1696 true due to this insn being after a jump. */
1697 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1698 remove_note (insn_b, note);
1700 insn_b = NULL_RTX;
1702 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1703 x must be executed twice. */
1704 else if (insn_b && side_effects_p (orig_x))
1705 return FALSE;
1707 x = orig_x;
1708 goto success;
1711 if (noce_try_store_flag (&if_info))
1712 goto success;
1713 if (noce_try_minmax (&if_info))
1714 goto success;
1715 if (noce_try_abs (&if_info))
1716 goto success;
1717 if (HAVE_conditional_move
1718 && noce_try_cmove (&if_info))
1719 goto success;
1720 if (! HAVE_conditional_execution)
1722 if (noce_try_store_flag_constants (&if_info))
1723 goto success;
1724 if (noce_try_store_flag_inc (&if_info))
1725 goto success;
1726 if (noce_try_store_flag_mask (&if_info))
1727 goto success;
1728 if (HAVE_conditional_move
1729 && noce_try_cmove_arith (&if_info))
1730 goto success;
1733 return FALSE;
1735 success:
1736 /* The original sets may now be killed. */
1737 delete_insn (insn_a);
1739 /* Several special cases here: First, we may have reused insn_b above,
1740 in which case insn_b is now NULL. Second, we want to delete insn_b
 1741 if it came from the ELSE block, because it follows the now-correct
1742 write that appears in the TEST block. However, if we got insn_b from
1743 the TEST block, it may in fact be loading data needed for the comparison.
1744 We'll let life_analysis remove the insn if it's really dead. */
1745 if (insn_b && else_bb)
1746 delete_insn (insn_b);
1748 /* The new insns will have been inserted just before the jump. We should
1749 be able to remove the jump with impunity, but the condition itself may
1750 have been modified by gcse to be shared across basic blocks. */
1751 delete_insn (jump);
1753 /* If we used a temporary, fix it up now. */
1754 if (orig_x != x)
1756 start_sequence ();
1757 noce_emit_move_insn (copy_rtx (orig_x), x);
1758 insn_b = gen_sequence ();
1759 end_sequence ();
1761 emit_insn_after (insn_b, test_bb->end);
1764 /* Merge the blocks! */
1765 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1767 return TRUE;
1770 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1771 straight line code. Return true if successful. */
1773 static int
1774 process_if_block (test_bb, then_bb, else_bb, join_bb)
1775 basic_block test_bb; /* Basic block test is in */
1776 basic_block then_bb; /* Basic block for THEN block */
1777 basic_block else_bb; /* Basic block for ELSE block */
1778 basic_block join_bb; /* Basic block the join label is in */
1780 if (! reload_completed
1781 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1782 return TRUE;
1784 if (HAVE_conditional_execution
1785 && reload_completed
1786 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1787 return TRUE;
1789 return FALSE;
1792 /* Merge the blocks and mark for local life update. */
1794 static void
1795 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1796 basic_block test_bb; /* Basic block test is in */
1797 basic_block then_bb; /* Basic block for THEN block */
1798 basic_block else_bb; /* Basic block for ELSE block */
1799 basic_block join_bb; /* Basic block the join label is in */
1801 basic_block combo_bb;
1803 /* All block merging is done into the lower block numbers. */
1805 combo_bb = test_bb;
1807 /* First merge TEST block into THEN block. This is a no-brainer since
1808 the THEN block did not have a code label to begin with. */
1809 if (then_bb)
1811 if (combo_bb->global_live_at_end)
1812 COPY_REG_SET (combo_bb->global_live_at_end,
1813 then_bb->global_live_at_end);
1814 merge_blocks_nomove (combo_bb, then_bb);
1815 num_removed_blocks++;
1818 /* The ELSE block, if it existed, had a label. That label count
1819 will almost always be zero, but odd things can happen when labels
1820 get their addresses taken. */
1821 if (else_bb)
1823 merge_blocks_nomove (combo_bb, else_bb);
1824 num_removed_blocks++;
1827 /* If there was no join block reported, that means it was not adjacent
1828 to the others, and so we cannot merge them. */
1830 if (! join_bb)
1832 rtx last = combo_bb->end;
1834 /* The outgoing edge for the current COMBO block should already
1835 be correct. Verify this. */
1836 if (combo_bb->succ == NULL_EDGE)
1838 if (find_reg_note (last, REG_NORETURN, NULL))
1840 else if (GET_CODE (last) == INSN
1841 && GET_CODE (PATTERN (last)) == TRAP_IF
1842 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
1844 else
1845 abort ();
1848 /* There should still be something at the end of the THEN or ELSE
1849 blocks taking us to our final destination. */
1850 else if (GET_CODE (last) == JUMP_INSN)
1852 else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
1853 && GET_CODE (last) == CALL_INSN
1854 && SIBLING_CALL_P (last))
1856 else if ((combo_bb->succ->flags & EDGE_EH)
1857 && can_throw_internal (last))
1859 else
1860 abort ();
1863 /* The JOIN block may have had quite a number of other predecessors too.
1864 Since we've already merged the TEST, THEN and ELSE blocks, we should
1865 have only one remaining edge from our if-then-else diamond. If there
1866 is more than one remaining edge, it must come from elsewhere. There
1867 may be zero incoming edges if the THEN block didn't actually join
1868 back up (as with a call to abort). */
1869 else if ((join_bb->pred == NULL
1870 || join_bb->pred->pred_next == NULL)
1871 && join_bb != EXIT_BLOCK_PTR)
1873 /* We can merge the JOIN. */
1874 if (combo_bb->global_live_at_end)
1875 COPY_REG_SET (combo_bb->global_live_at_end,
1876 join_bb->global_live_at_end);
1877 merge_blocks_nomove (combo_bb, join_bb);
1878 num_removed_blocks++;
1880 else
1882 /* We cannot merge the JOIN. */
1884 /* The outgoing edge for the current COMBO block should already
1885 be correct. Verify this. */
1886 if (combo_bb->succ->succ_next != NULL_EDGE
1887 || combo_bb->succ->dest != join_bb)
1888 abort ();
1890 /* Remove the jump and cruft from the end of the COMBO block. */
1891 if (join_bb != EXIT_BLOCK_PTR)
1892 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1895 num_updated_if_blocks++;
1898 /* Find a block ending in a simple IF condition. Return TRUE if
1899 we were able to transform it in some way. */
1901 static int
1902 find_if_header (test_bb)
1903 basic_block test_bb;
1905 edge then_edge;
1906 edge else_edge;
1908 /* The kind of block we're looking for has exactly two successors. */
1909 if ((then_edge = test_bb->succ) == NULL_EDGE
1910 || (else_edge = then_edge->succ_next) == NULL_EDGE
1911 || else_edge->succ_next != NULL_EDGE)
1912 return FALSE;
1914 /* Neither edge should be abnormal. */
1915 if ((then_edge->flags & EDGE_COMPLEX)
1916 || (else_edge->flags & EDGE_COMPLEX))
1917 return FALSE;
1919 /* The THEN edge is canonically the one that falls through. */
1920 if (then_edge->flags & EDGE_FALLTHRU)
1922 else if (else_edge->flags & EDGE_FALLTHRU)
1924 edge e = else_edge;
1925 else_edge = then_edge;
1926 then_edge = e;
1928 else
1929 /* Otherwise this must be a multiway branch of some sort. */
1930 return FALSE;
1932 if (find_if_block (test_bb, then_edge, else_edge))
1933 goto success;
1934 if (HAVE_trap && HAVE_conditional_trap
1935 && find_cond_trap (test_bb, then_edge, else_edge))
1936 goto success;
1937 if (post_dominators
1938 && (! HAVE_conditional_execution || reload_completed))
1940 if (find_if_case_1 (test_bb, then_edge, else_edge))
1941 goto success;
1942 if (find_if_case_2 (test_bb, then_edge, else_edge))
1943 goto success;
1946 return FALSE;
1948 success:
1949 if (rtl_dump_file)
1950 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1951 return TRUE;
1954 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1955 block. If so, we'll try to convert the insns to not require the branch.
 1956 Return TRUE if we were successful at converting the block. */
1958 static int
1959 find_if_block (test_bb, then_edge, else_edge)
1960 basic_block test_bb;
1961 edge then_edge, else_edge;
1963 basic_block then_bb = then_edge->dest;
1964 basic_block else_bb = else_edge->dest;
1965 basic_block join_bb = NULL_BLOCK;
1966 edge then_succ = then_bb->succ;
1967 edge else_succ = else_bb->succ;
1968 basic_block next;
1970 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1971 if (then_bb->pred->pred_next != NULL_EDGE)
1972 return FALSE;
1974 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1975 if (then_succ != NULL_EDGE
1976 && (then_succ->succ_next != NULL_EDGE
1977 || (then_succ->flags & EDGE_COMPLEX)))
1978 return FALSE;
1980 /* If the THEN block has no successors, conditional execution can still
1981 make a conditional call. Don't do this unless the ELSE block has
1982 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1983 Check for the last insn of the THEN block being an indirect jump, which
1984 is listed as not having any successors, but confuses the rest of the CE
1985 code processing. XXX we should fix this in the future. */
1986 if (then_succ == NULL)
1988 if (else_bb->pred->pred_next == NULL_EDGE)
1990 rtx last_insn = then_bb->end;
1992 while (last_insn
1993 && GET_CODE (last_insn) == NOTE
1994 && last_insn != then_bb->head)
1995 last_insn = PREV_INSN (last_insn);
1997 if (last_insn
1998 && GET_CODE (last_insn) == JUMP_INSN
1999 && ! simplejump_p (last_insn))
2000 return FALSE;
2002 join_bb = else_bb;
2003 else_bb = NULL_BLOCK;
2005 else
2006 return FALSE;
2009 /* If the THEN block's successor is the other edge out of the TEST block,
2010 then we have an IF-THEN combo without an ELSE. */
2011 else if (then_succ->dest == else_bb)
2013 join_bb = else_bb;
2014 else_bb = NULL_BLOCK;
2017 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
2018 has exactly one predecessor and one successor, and the outgoing edge
2019 is not complex, then we have an IF-THEN-ELSE combo. */
2020 else if (else_succ != NULL_EDGE
2021 && then_succ->dest == else_succ->dest
2022 && else_bb->pred->pred_next == NULL_EDGE
2023 && else_succ->succ_next == NULL_EDGE
2024 && ! (else_succ->flags & EDGE_COMPLEX))
2025 join_bb = else_succ->dest;
2027 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2028 else
2029 return FALSE;
2031 num_possible_if_blocks++;
2033 if (rtl_dump_file)
2035 if (else_bb)
2036 fprintf (rtl_dump_file,
2037 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
2038 test_bb->index, then_bb->index, else_bb->index,
2039 join_bb->index);
2040 else
2041 fprintf (rtl_dump_file,
2042 "\nIF-THEN block found, start %d, then %d, join %d\n",
2043 test_bb->index, then_bb->index, join_bb->index);
2046 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we
2047 get the first condition for free, since we've already asserted that
2048 there's a fallthru edge from IF to THEN. */
2049 /* ??? As an enhancement, move the ELSE block. Have to deal with
2050 BLOCK notes, if by no other means than aborting the merge if they
2051 exist. Sticky enough I don't want to think about it now. */
2052 next = then_bb;
2053 if (else_bb && (next = next->next_bb) != else_bb)
2054 return FALSE;
2055 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2057 if (else_bb)
2058 join_bb = NULL;
2059 else
2060 return FALSE;
2063 /* Do the real work. */
2064 return process_if_block (test_bb, then_bb, else_bb, join_bb);
2067 /* Convert a branch over a trap, or a branch to a trap,
2068 into a conditional trap. */
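/* Roughly (illustrative only), a block of the form

	if (test) goto over;
	trap;
     over: ...

   becomes a single conditional trap insn guarded by the (possibly
   reversed) test, after which the now-unreachable trap block can be
   deleted.  */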
2070 static int
2071 find_cond_trap (test_bb, then_edge, else_edge)
2072 basic_block test_bb;
2073 edge then_edge, else_edge;
2075 basic_block then_bb, else_bb, trap_bb, other_bb;
2076 rtx trap, jump, cond, cond_earliest, seq;
2077 enum rtx_code code;
2079 then_bb = then_edge->dest;
2080 else_bb = else_edge->dest;
2082 /* Locate the block with the trap instruction. */
2083 /* ??? While we look for no successors, we really ought to allow
2084 EH successors. Need to fix merge_if_block for that to work. */
2085 if ((trap = block_has_only_trap (then_bb)) != NULL)
2086 trap_bb = then_bb, other_bb = else_bb;
2087 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2088 trap_bb = else_bb, other_bb = then_bb;
2089 else
2090 return FALSE;
2092 if (rtl_dump_file)
2094 fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2095 test_bb->index, trap_bb->index);
2098 /* If this is not a standard conditional jump, we can't parse it. */
2099 jump = test_bb->end;
2100 cond = noce_get_condition (jump, &cond_earliest);
2101 if (! cond)
2102 return FALSE;
2104 /* If the conditional jump is more than just a conditional jump,
2105 then we cannot do if-conversion on this block. */
2106 if (! onlyjump_p (jump))
2107 return FALSE;
2109 /* We must be comparing objects whose modes imply the size. */
2110 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2111 return FALSE;
2113 /* Reverse the comparison code, if necessary. */
2114 code = GET_CODE (cond);
2115 if (then_bb == trap_bb)
2117 code = reversed_comparison_code (cond, jump);
2118 if (code == UNKNOWN)
2119 return FALSE;
2122 /* Attempt to generate the conditional trap. */
2123 seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
2124 TRAP_CODE (PATTERN (trap)));
2125 if (seq == NULL)
2126 return FALSE;
2128 /* Emit the new insns before cond_earliest. */
2129 emit_insn_before (seq, cond_earliest);
2131 /* Delete the trap block if possible. */
2132 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2133 if (trap_bb->pred == NULL)
2135 flow_delete_block (trap_bb);
2136 num_removed_blocks++;
2139 /* If the non-trap block and the test are now adjacent, merge them.
2140 Otherwise we must insert a direct branch. */
2141 if (test_bb->next_bb == other_bb)
2143 delete_insn (jump);
2144 merge_if_block (test_bb, NULL, NULL, other_bb);
2146 else
2148 rtx lab, newjump;
2150 lab = JUMP_LABEL (jump);
2151 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2152 LABEL_NUSES (lab) += 1;
2153 JUMP_LABEL (newjump) = lab;
2154 emit_barrier_after (newjump);
2156 delete_insn (jump);
2159 return TRUE;
2162 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2163 return it. */
2165 static rtx
2166 block_has_only_trap (bb)
2167 basic_block bb;
2169 rtx trap;
2171 /* We're not the exit block. */
2172 if (bb == EXIT_BLOCK_PTR)
2173 return NULL_RTX;
2175 /* The block must have no successors. */
2176 if (bb->succ)
2177 return NULL_RTX;
2179 /* The only instruction in the block must be the trap. */
2180 trap = first_active_insn (bb);
2181 if (! (trap == bb->end
2182 && GET_CODE (PATTERN (trap)) == TRAP_IF
2183 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2184 return NULL_RTX;
2186 return trap;
2189 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2190 transformable, but not necessarily the other. There need be no
2191 JOIN block.
2193 Return TRUE if we were successful at converting the block.
2195 Cases we'd like to look at:
2198 if (test) goto over; // x not live
2199 x = a;
2200 goto label;
2201 over:
2203 becomes
2205 x = a;
2206 if (! test) goto label;
2209 if (test) goto E; // x not live
2210 x = big();
2211 goto L;
2213 x = b;
2214 goto M;
2216 becomes
2218 x = b;
2219 if (test) goto M;
2220 x = big();
2221 goto L;
2223 (3) // This one's really only interesting for targets that can do
2224 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2225 // it results in multiple branches on a cache line, which often
2226 // does not sit well with predictors.
2228 if (test1) goto E; // predicted not taken
2229 x = a;
2230 if (test2) goto F;
2233 x = b;
2236 becomes
2238 x = a;
2239 if (test1) goto E;
2240 if (test2) goto F;
2242 Notes:
2244 (A) Don't do (2) if the branch is predicted against the block we're
2245 eliminating. Do it anyway if we can eliminate a branch; this requires
2246 that the sole successor of the eliminated block postdominate the other
2247 side of the if.
2249 (B) With CE, on (3) we can steal from both sides of the if, creating
2251 if (test1) x = a;
2252 if (!test1) x = b;
2253 if (test1) goto J;
2254 if (test2) goto F;
2258 Again, this is most useful if J postdominates.
2260 (C) CE substitutes for helpful life information.
2262 (D) These heuristics need a lot of work. */
2264 /* Tests for case 1 above. */
2266 static int
2267 find_if_case_1 (test_bb, then_edge, else_edge)
2268 basic_block test_bb;
2269 edge then_edge, else_edge;
2271 basic_block then_bb = then_edge->dest;
2272 basic_block else_bb = else_edge->dest, new_bb;
2273 edge then_succ = then_bb->succ;
2274 int then_bb_index;
2276 /* THEN has one successor. */
2277 if (!then_succ || then_succ->succ_next != NULL)
2278 return FALSE;
2280 /* THEN does not fall through, but is not strange either. */
2281 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2282 return FALSE;
2284 /* THEN has one predecessor. */
2285 if (then_bb->pred->pred_next != NULL)
2286 return FALSE;
2288 /* THEN must do something. */
2289 if (forwarder_block_p (then_bb))
2290 return FALSE;
2292 num_possible_if_blocks++;
2293 if (rtl_dump_file)
2294 fprintf (rtl_dump_file,
2295 "\nIF-CASE-1 found, start %d, then %d\n",
2296 test_bb->index, then_bb->index);
2298 /* THEN is small. */
2299 if (count_bb_insns (then_bb) > BRANCH_COST)
2300 return FALSE;
2302 /* Registers set are dead, or are predicable. */
2303 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2304 then_bb->succ->dest, 1))
2305 return FALSE;
2307 /* Conversion went ok, including moving the insns and fixing up the
2308 jump. Adjust the CFG to match. */
2310 bitmap_operation (test_bb->global_live_at_end,
2311 else_bb->global_live_at_start,
2312 then_bb->global_live_at_end, BITMAP_IOR);
2314 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2315 then_bb_index = then_bb->index;
2316 flow_delete_block (then_bb);
2317 /* Make rest of code believe that the newly created block is the THEN_BB
2318 block we removed. */
2319 if (new_bb)
2321 new_bb->index = then_bb_index;
2322 BASIC_BLOCK (then_bb_index) = new_bb;
2324 /* We've possibly created a jump to the next insn; cleanup_cfg will
2325 solve that later. */
2327 num_removed_blocks++;
2328 num_updated_if_blocks++;
2330 return TRUE;
2333 /* Test for case 2 above. */
2335 static int
2336 find_if_case_2 (test_bb, then_edge, else_edge)
2337 basic_block test_bb;
2338 edge then_edge, else_edge;
2340 basic_block then_bb = then_edge->dest;
2341 basic_block else_bb = else_edge->dest;
2342 edge else_succ = else_bb->succ;
2343 rtx note;
2345 /* ELSE has one successor. */
2346 if (!else_succ || else_succ->succ_next != NULL)
2347 return FALSE;
2349 /* ELSE outgoing edge is not complex. */
2350 if (else_succ->flags & EDGE_COMPLEX)
2351 return FALSE;
2353 /* ELSE has one predecessor. */
2354 if (else_bb->pred->pred_next != NULL)
2355 return FALSE;
2357 /* THEN is not EXIT. */
2358 if (then_bb->index < 0)
2359 return FALSE;
2361 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2362 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2363 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2365 else if (else_succ->dest->index < 0
2366 || TEST_BIT (post_dominators[then_bb->index],
2367 else_succ->dest->index))
2369 else
2370 return FALSE;
2372 num_possible_if_blocks++;
2373 if (rtl_dump_file)
2374 fprintf (rtl_dump_file,
2375 "\nIF-CASE-2 found, start %d, else %d\n",
2376 test_bb->index, else_bb->index);
2378 /* ELSE is small. */
2379 if (count_bb_insns (else_bb) > BRANCH_COST)
2380 return FALSE;
2382 /* Registers set are dead, or are predicable. */
2383 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2384 return FALSE;
2386 /* Conversion went ok, including moving the insns and fixing up the
2387 jump. Adjust the CFG to match. */
2389 bitmap_operation (test_bb->global_live_at_end,
2390 then_bb->global_live_at_start,
2391 else_bb->global_live_at_end, BITMAP_IOR);
2393 flow_delete_block (else_bb);
2395 num_removed_blocks++;
2396 num_updated_if_blocks++;
2398 /* ??? We may now fallthru from one of THEN's successors into a join
2399 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2401 return TRUE;
2404 /* A subroutine of dead_or_predicable called through for_each_rtx.
2405 Return 1 if a memory is found. */
2407 static int
2408 find_memory (px, data)
2409 rtx *px;
2410 void *data ATTRIBUTE_UNUSED;
2412 return GET_CODE (*px) == MEM;
2415 /* Used by the code above to perform the actual rtl transformations.
2416 Return TRUE if successful.
2418 TEST_BB is the block containing the conditional branch. MERGE_BB
2419 is the block containing the code to manipulate. NEW_DEST is the
2420 label TEST_BB should be branching to after the conversion.
2421 REVERSEP is true if the sense of the branch should be reversed. */
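/* For example (purely illustrative; the register and label names are
   made up), given

	TEST_BB:   ... if (test) goto OTHER_BB;
	MERGE_BB:  x = a;  goto NEW_DEST;

   the insns of MERGE_BB are moved up in front of the test sequence and
   the branch is redirected (and, with REVERSEP, inverted) to NEW_DEST:

	TEST_BB:   ... x = a;  if (! test) goto NEW_DEST;

   This is only valid if x is not needed on the OTHER_BB path, or if the
   moved insns can instead be predicated on the branch condition.  */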
2423 static int
2424 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2425 basic_block test_bb, merge_bb, other_bb;
2426 basic_block new_dest;
2427 int reversep;
2429 rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
2431 jump = test_bb->end;
2433 /* Find the extent of the real code in the merge block. */
2434 head = merge_bb->head;
2435 end = merge_bb->end;
2437 if (GET_CODE (head) == CODE_LABEL)
2438 head = NEXT_INSN (head);
2439 if (GET_CODE (head) == NOTE)
2441 if (head == end)
2443 head = end = NULL_RTX;
2444 goto no_body;
2446 head = NEXT_INSN (head);
2449 if (GET_CODE (end) == JUMP_INSN)
2451 if (head == end)
2453 head = end = NULL_RTX;
2454 goto no_body;
2456 end = PREV_INSN (end);
2459 /* Disable handling dead code by conditional execution if the machine needs
2460 to do anything funny with the tests, etc. */
2461 #ifndef IFCVT_MODIFY_TESTS
2462 if (HAVE_conditional_execution)
2464 /* In the conditional execution case, we have things easy. We know
2465 the condition is reversible. We don't have to check life info,
2466 because we're going to conditionally execute the code anyway.
2467 All that's left is making sure the insns involved can actually
2468 be predicated. */
2470 rtx cond, prob_val;
2472 cond = cond_exec_get_condition (jump);
2473 if (! cond)
2474 return FALSE;
2476 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2477 if (prob_val)
2478 prob_val = XEXP (prob_val, 0);
2480 if (reversep)
2482 enum rtx_code rev = reversed_comparison_code (cond, jump);
2483 if (rev == UNKNOWN)
2484 return FALSE;
2485 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2486 XEXP (cond, 1));
2487 if (prob_val)
2488 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2491 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
2492 goto cancel;
2494 earliest = jump;
2496 else
2497 #endif
2499 /* In the non-conditional execution case, we have to verify that there
2500 are no trapping operations, no calls, no references to memory, and
2501 that any registers modified are dead at the branch site. */
2503 rtx insn, cond, prev;
2504 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2505 regset merge_set, tmp, test_live, test_set;
2506 struct propagate_block_info *pbi;
2507 int i, fail = 0;
2509 /* Check for no calls or trapping operations. */
2510 for (insn = head; ; insn = NEXT_INSN (insn))
2512 if (GET_CODE (insn) == CALL_INSN)
2513 return FALSE;
2514 if (INSN_P (insn))
2516 if (may_trap_p (PATTERN (insn)))
2517 return FALSE;
2519 /* ??? Even non-trapping memories such as stack frame
2520 references must be avoided. For stores, we collect
2521 no lifetime info; for reads, we'd have to assert
2522 true_dependence false against every store in the
2523 TEST range. */
2524 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2525 return FALSE;
2527 if (insn == end)
2528 break;
2531 if (! any_condjump_p (jump))
2532 return FALSE;
2534 /* Find the extent of the conditional. */
2535 cond = noce_get_condition (jump, &earliest);
2536 if (! cond)
2537 return FALSE;
2539 /* Collect:
2540 MERGE_SET = set of registers set in MERGE_BB
2541 TEST_LIVE = set of registers live at EARLIEST
2542 TEST_SET = set of registers set between EARLIEST and the
2543 end of the block. */
2545 tmp = INITIALIZE_REG_SET (tmp_head);
2546 merge_set = INITIALIZE_REG_SET (merge_set_head);
2547 test_live = INITIALIZE_REG_SET (test_live_head);
2548 test_set = INITIALIZE_REG_SET (test_set_head);
2550 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2551 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2552 since we've already asserted that MERGE_BB is small. */
2553 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2555 /* For small register class machines, don't lengthen lifetimes of
2556 hard registers before reload. */
2557 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2559 EXECUTE_IF_SET_IN_BITMAP
2560 (merge_set, 0, i,
2562 if (i < FIRST_PSEUDO_REGISTER
2563 && ! fixed_regs[i]
2564 && ! global_regs[i])
2565 fail = 1;
2569 /* For TEST, we're interested in a range of insns, not a whole block.
2570 Moreover, we're interested in the insns live from OTHER_BB. */
2572 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2573 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set, 0);
2576 for (insn = jump; ; insn = prev)
2578 prev = propagate_one_insn (pbi, insn);
2579 if (insn == earliest)
2580 break;
2583 free_propagate_block_info (pbi);
2585 /* We can perform the transformation if
2586 MERGE_SET & (TEST_SET | TEST_LIVE)
2587 and
2588 TEST_SET & merge_bb->global_live_at_start
2589 are empty. */
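/* Concretely (with made-up registers): if MERGE_BB sets some register
   r1, then r1 must be neither live at EARLIEST nor set anywhere in the
   test sequence; and if the test sequence sets some register r2, then
   r2 must not be live on entry to MERGE_BB.  */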
2591 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2592 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2593 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2595 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2596 BITMAP_AND);
2597 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2599 FREE_REG_SET (tmp);
2600 FREE_REG_SET (merge_set);
2601 FREE_REG_SET (test_live);
2602 FREE_REG_SET (test_set);
2604 if (fail)
2605 return FALSE;
2608 no_body:
2609 /* We don't want to use normal invert_jump or redirect_jump because
2610 we don't want delete_insn called. Also, we want to do our own
2611 change group management. */
2613 old_dest = JUMP_LABEL (jump);
2614 if (other_bb != new_dest)
2616 new_label = block_label (new_dest);
2617 if (reversep
2618 ? ! invert_jump_1 (jump, new_label)
2619 : ! redirect_jump_1 (jump, new_label))
2620 goto cancel;
2623 if (! apply_change_group ())
2624 return FALSE;
2626 if (other_bb != new_dest)
2628 if (old_dest)
2629 LABEL_NUSES (old_dest) -= 1;
2630 if (new_label)
2631 LABEL_NUSES (new_label) += 1;
2632 JUMP_LABEL (jump) = new_label;
2633 if (reversep)
2634 invert_br_probabilities (jump);
2636 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
2637 if (reversep)
2639 gcov_type count, probability;
2640 count = BRANCH_EDGE (test_bb)->count;
2641 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
2642 FALLTHRU_EDGE (test_bb)->count = count;
2643 probability = BRANCH_EDGE (test_bb)->probability;
2644 BRANCH_EDGE (test_bb)->probability
2645 = FALLTHRU_EDGE (test_bb)->probability;
2646 FALLTHRU_EDGE (test_bb)->probability = probability;
2647 update_br_prob_note (test_bb);
2651 /* Move the insns out of MERGE_BB to before the branch. */
2652 if (head != NULL)
2654 if (end == merge_bb->end)
2655 merge_bb->end = PREV_INSN (head);
2657 if (squeeze_notes (&head, &end))
2658 return TRUE;
2660 reorder_insns (head, end, PREV_INSN (earliest));
2663 /* Remove the jump and edge if we can. */
2664 if (other_bb == new_dest)
2666 delete_insn (jump);
2667 remove_edge (BRANCH_EDGE (test_bb));
2668 /* ??? Can't merge blocks here, as then_bb is still in use.
2669 At minimum, the merge will get done just before bb-reorder. */
2672 return TRUE;
2674 cancel:
2675 cancel_changes (0);
2676 return FALSE;
2679 /* Main entry point for all if-conversion. */
2681 void
2682 if_convert (x_life_data_ok)
2683 int x_life_data_ok;
2685 basic_block bb;
2687 num_possible_if_blocks = 0;
2688 num_updated_if_blocks = 0;
2689 num_removed_blocks = 0;
2690 life_data_ok = (x_life_data_ok != 0);
2692 /* Free up basic_block_for_insn so that we don't have to keep it
2693 up to date, either here or in merge_blocks_nomove. */
2694 free_basic_block_vars (1);
2696 /* Compute postdominators if we think we'll use them. */
2697 post_dominators = NULL;
2698 if (HAVE_conditional_execution || life_data_ok)
2700 post_dominators = sbitmap_vector_alloc (last_basic_block, last_basic_block);
2701 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2703 if (life_data_ok)
2704 clear_bb_flags ();
2706 /* Go through each of the basic blocks looking for things to convert. */
2707 FOR_EACH_BB (bb)
2708 while (find_if_header (bb))
2709 continue;
2711 if (post_dominators)
2712 sbitmap_vector_free (post_dominators);
2714 if (rtl_dump_file)
2715 fflush (rtl_dump_file);
2717 clear_aux_for_blocks ();
2719 /* Rebuild life info for basic blocks that require it. */
2720 if (num_removed_blocks && life_data_ok)
2722 /* If we allocated new pseudos, we must resize the array for sched1. */
2723 if (max_regno < max_reg_num ())
2725 max_regno = max_reg_num ();
2726 allocate_reg_info (max_regno, FALSE, FALSE);
2728 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
2729 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2730 | PROP_KILL_DEAD_CODE);
2733 /* Write the final stats. */
2734 if (rtl_dump_file && num_possible_if_blocks > 0)
2736 fprintf (rtl_dump_file,
2737 "\n%d possible IF blocks searched.\n",
2738 num_possible_if_blocks);
2739 fprintf (rtl_dump_file,
2740 "%d IF blocks converted.\n",
2741 num_updated_if_blocks);
2742 fprintf (rtl_dump_file,
2743 "%d basic blocks deleted.\n\n\n",
2744 num_removed_blocks);
2747 #ifdef ENABLE_CHECKING
2748 verify_flow_info ();
2749 #endif