1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "function.h"
30 #include "flags.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "except.h"
34 #include "hard-reg-set.h"
35 #include "basic-block.h"
36 #include "expr.h"
37 #include "real.h"
38 #include "output.h"
39 #include "optabs.h"
40 #include "toplev.h"
41 #include "tm_p.h"
42 #include "cfgloop.h"
43 #include "target.h"
46 #ifndef HAVE_conditional_execution
47 #define HAVE_conditional_execution 0
48 #endif
49 #ifndef HAVE_conditional_move
50 #define HAVE_conditional_move 0
51 #endif
52 #ifndef HAVE_incscc
53 #define HAVE_incscc 0
54 #endif
55 #ifndef HAVE_decscc
56 #define HAVE_decscc 0
57 #endif
58 #ifndef HAVE_trap
59 #define HAVE_trap 0
60 #endif
61 #ifndef HAVE_conditional_trap
62 #define HAVE_conditional_trap 0
63 #endif
65 #ifndef MAX_CONDITIONAL_EXECUTE
66 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
67 #endif
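/* For example, with the common BRANCH_COST of 2 this default permits at
   most three conditionally executed insns; cond_exec_process_if_block
   doubles the combined budget when both a THEN and an ELSE block are
   present.  */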
69 #define NULL_EDGE ((edge) NULL)
70 #define NULL_BLOCK ((basic_block) NULL)
72 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
73 static int num_possible_if_blocks;
75 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
76 execution. */
77 static int num_updated_if_blocks;
79 /* # of changes made which require life information to be updated. */
80 static int num_true_changes;
82 /* Whether conditional execution changes were made. */
83 static int cond_exec_changed_p;
85 /* True if life data ok at present. */
86 static bool life_data_ok;
88 /* Forward references. */
89 static int count_bb_insns (basic_block);
90 static bool cheap_bb_rtx_cost_p (basic_block, int);
91 static rtx first_active_insn (basic_block);
92 static rtx last_active_insn (basic_block, int);
93 static basic_block block_fallthru (basic_block);
94 static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
95 static rtx cond_exec_get_condition (rtx);
96 static int cond_exec_process_if_block (ce_if_block_t *, int);
97 static rtx noce_get_condition (rtx, rtx *);
98 static int noce_operand_ok (rtx);
99 static int noce_process_if_block (ce_if_block_t *);
100 static int process_if_block (ce_if_block_t *);
101 static void merge_if_block (ce_if_block_t *);
102 static int find_cond_trap (basic_block, edge, edge);
103 static basic_block find_if_header (basic_block, int);
104 static int block_jumps_and_fallthru_p (basic_block, basic_block);
105 static int find_if_block (ce_if_block_t *);
106 static int find_if_case_1 (basic_block, edge, edge);
107 static int find_if_case_2 (basic_block, edge, edge);
108 static int find_memory (rtx *, void *);
109 static int dead_or_predicable (basic_block, basic_block, basic_block,
110 basic_block, int);
111 static void noce_emit_move_insn (rtx, rtx);
112 static rtx block_has_only_trap (basic_block);
113 static void mark_loop_exit_edges (void);
115 /* Sets EDGE_LOOP_EXIT flag for all loop exits. */
116 static void
117 mark_loop_exit_edges (void)
119 struct loops loops;
120 basic_block bb;
121 edge e;
123 flow_loops_find (&loops, LOOP_TREE);
124 free_dominance_info (CDI_DOMINATORS);
126 if (loops.num > 1)
128 FOR_EACH_BB (bb)
130 edge_iterator ei;
131 FOR_EACH_EDGE (e, ei, bb->succs)
133 if (find_common_loop (bb->loop_father, e->dest->loop_father)
134 != bb->loop_father)
135 e->flags |= EDGE_LOOP_EXIT;
136 else
137 e->flags &= ~EDGE_LOOP_EXIT;
142 flow_loops_free (&loops);
145 /* Count the number of non-jump active insns in BB. */
147 static int
148 count_bb_insns (basic_block bb)
150 int count = 0;
151 rtx insn = BB_HEAD (bb);
153 while (1)
155 if (CALL_P (insn) || NONJUMP_INSN_P (insn))
156 count++;
158 if (insn == BB_END (bb))
159 break;
160 insn = NEXT_INSN (insn);
163 return count;
166 /* Determine whether the total insn_rtx_cost on non-jump insns in
167 basic block BB is less than MAX_COST. This function returns
168 false if the cost of any instruction could not be estimated. */
170 static bool
171 cheap_bb_rtx_cost_p (basic_block bb, int max_cost)
173 int count = 0;
174 rtx insn = BB_HEAD (bb);
176 while (1)
178 if (NONJUMP_INSN_P (insn))
180 int cost = insn_rtx_cost (PATTERN (insn));
181 if (cost == 0)
182 return false;
184 /* If this instruction is the load or set of a "stack" register,
185 such as a floating point register on x87, then the cost of
186 speculatively executing this instruction needs to include
187 the additional cost of popping this register off of the
188 register stack. */
189 #ifdef STACK_REGS
191 rtx set = single_set (insn);
192 if (set && STACK_REG_P (SET_DEST (set)))
193 cost += COSTS_N_INSNS (1);
195 #endif
197 count += cost;
198 if (count >= max_cost)
199 return false;
201 else if (CALL_P (insn))
202 return false;
204 if (insn == BB_END (bb))
205 break;
206 insn = NEXT_INSN (insn);
209 return true;
212 /* Return the first non-jump active insn in the basic block. */
214 static rtx
215 first_active_insn (basic_block bb)
217 rtx insn = BB_HEAD (bb);
219 if (LABEL_P (insn))
221 if (insn == BB_END (bb))
222 return NULL_RTX;
223 insn = NEXT_INSN (insn);
226 while (NOTE_P (insn))
228 if (insn == BB_END (bb))
229 return NULL_RTX;
230 insn = NEXT_INSN (insn);
233 if (JUMP_P (insn))
234 return NULL_RTX;
236 return insn;
239 /* Return the last active (non-jump) insn in the basic block. */
241 static rtx
242 last_active_insn (basic_block bb, int skip_use_p)
244 rtx insn = BB_END (bb);
245 rtx head = BB_HEAD (bb);
247 while (NOTE_P (insn)
248 || JUMP_P (insn)
249 || (skip_use_p
250 && NONJUMP_INSN_P (insn)
251 && GET_CODE (PATTERN (insn)) == USE))
253 if (insn == head)
254 return NULL_RTX;
255 insn = PREV_INSN (insn);
258 if (LABEL_P (insn))
259 return NULL_RTX;
261 return insn;
264 /* Return the basic block reached by falling through the basic block BB. */
266 static basic_block
267 block_fallthru (basic_block bb)
269 edge e;
270 edge_iterator ei;
272 FOR_EACH_EDGE (e, ei, bb->succs)
273 if (e->flags & EDGE_FALLTHRU)
274 break;
276 return (e) ? e->dest : NULL_BLOCK;
279 /* Go through a bunch of insns, converting them to conditional
280 execution format if possible. Return TRUE if all of the non-note
281 insns were processed. */
283 static int
284 cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
285 /* if block information */rtx start,
286 /* first insn to look at */rtx end,
287 /* last insn to look at */rtx test,
288 /* conditional execution test */rtx prob_val,
289 /* probability of branch taken. */int mod_ok)
291 int must_be_last = FALSE;
292 rtx insn;
293 rtx xtest;
294 rtx pattern;
296 if (!start || !end)
297 return FALSE;
299 for (insn = start; ; insn = NEXT_INSN (insn))
301 if (NOTE_P (insn))
302 goto insn_done;
304 if (!NONJUMP_INSN_P (insn) && !CALL_P (insn))
305 abort ();
307 /* Remove USE insns that get in the way. */
308 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
310 /* ??? Ug. Actually unlinking the thing is problematic,
311 given what we'd have to coordinate with our callers. */
312 SET_INSN_DELETED (insn);
313 goto insn_done;
316 /* Last insn wasn't last? */
317 if (must_be_last)
318 return FALSE;
320 if (modified_in_p (test, insn))
322 if (!mod_ok)
323 return FALSE;
324 must_be_last = TRUE;
327 /* Now build the conditional form of the instruction. */
328 pattern = PATTERN (insn);
329 xtest = copy_rtx (test);
331 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
332 two conditions. */
333 if (GET_CODE (pattern) == COND_EXEC)
335 if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
336 return FALSE;
338 xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
339 COND_EXEC_TEST (pattern));
340 pattern = COND_EXEC_CODE (pattern);
343 pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
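/* As an illustration, if TEST is (ne (reg:CC cc) (const_int 0)) and the
   original pattern is (set (reg:SI r1) (reg:SI r2)), the insn built here
   has the shape

       (cond_exec (ne (reg:CC cc) (const_int 0))
                  (set (reg:SI r1) (reg:SI r2)))

   where cc, r1 and r2 stand in for whatever registers the real insn uses,
   and the test may already be an AND of several conditions.  */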
345 /* If the machine needs to modify the insn being conditionally executed,
346 say for example to force a constant integer operand into a temp
347 register, do so here. */
348 #ifdef IFCVT_MODIFY_INSN
349 IFCVT_MODIFY_INSN (ce_info, pattern, insn);
350 if (! pattern)
351 return FALSE;
352 #endif
354 validate_change (insn, &PATTERN (insn), pattern, 1);
356 if (CALL_P (insn) && prob_val)
357 validate_change (insn, &REG_NOTES (insn),
358 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
359 REG_NOTES (insn)), 1);
361 insn_done:
362 if (insn == end)
363 break;
366 return TRUE;
369 /* Return the condition for a jump. Do not do any special processing. */
371 static rtx
372 cond_exec_get_condition (rtx jump)
374 rtx test_if, cond;
376 if (any_condjump_p (jump))
377 test_if = SET_SRC (pc_set (jump));
378 else
379 return NULL_RTX;
380 cond = XEXP (test_if, 0);
382 /* If this branches to JUMP_LABEL when the condition is false,
383 reverse the condition. */
384 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
385 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
387 enum rtx_code rev = reversed_comparison_code (cond, jump);
388 if (rev == UNKNOWN)
389 return NULL_RTX;
391 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
392 XEXP (cond, 1));
395 return cond;
398 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
399 to conditional execution. Return TRUE if we were successful at
400 converting the block. */
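/* As a rough illustration, on a fully predicated target such as ARM a
   source fragment like

       if (a < b) x = y; else x = z;

   can end up as a compare followed by two conditionally executed moves
   (roughly "cmp a, b; movlt x, y; movge x, z"), with no branch left.  */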
402 static int
403 cond_exec_process_if_block (ce_if_block_t * ce_info,
404 /* if block information */int do_multiple_p)
406 basic_block test_bb = ce_info->test_bb; /* last test block */
407 basic_block then_bb = ce_info->then_bb; /* THEN */
408 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
409 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
410 rtx then_start; /* first insn in THEN block */
411 rtx then_end; /* last insn in THEN block */
412 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
413 rtx else_end = NULL_RTX; /* last insn in ELSE block */
414 int max; /* max # of insns to convert. */
415 int then_mod_ok; /* whether conditional mods are ok in THEN */
416 rtx true_expr; /* test for else block insns */
417 rtx false_expr; /* test for then block insns */
418 rtx true_prob_val; /* probability of else block */
419 rtx false_prob_val; /* probability of then block */
420 int n_insns;
421 enum rtx_code false_code;
423 /* If test is comprised of && or || elements, and we've failed at handling
424 all of them together, just use the last test if it is the special case of
425 && elements without an ELSE block. */
426 if (!do_multiple_p && ce_info->num_multiple_test_blocks)
428 if (else_bb || ! ce_info->and_and_p)
429 return FALSE;
431 ce_info->test_bb = test_bb = ce_info->last_test_bb;
432 ce_info->num_multiple_test_blocks = 0;
433 ce_info->num_and_and_blocks = 0;
434 ce_info->num_or_or_blocks = 0;
437 /* Find the conditional jump to the ELSE or JOIN part, and isolate
438 the test. */
439 test_expr = cond_exec_get_condition (BB_END (test_bb));
440 if (! test_expr)
441 return FALSE;
443 /* If the conditional jump is more than just a conditional jump,
444 then we can not do conditional execution conversion on this block. */
445 if (! onlyjump_p (BB_END (test_bb)))
446 return FALSE;
448 /* Collect the bounds of where we're to search, skipping any labels, jumps
449 and notes at the beginning and end of the block. Then count the total
450 number of insns and see if it is small enough to convert. */
451 then_start = first_active_insn (then_bb);
452 then_end = last_active_insn (then_bb, TRUE);
453 n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
454 max = MAX_CONDITIONAL_EXECUTE;
456 if (else_bb)
458 max *= 2;
459 else_start = first_active_insn (else_bb);
460 else_end = last_active_insn (else_bb, TRUE);
461 n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
464 if (n_insns > max)
465 return FALSE;
467 /* Map test_expr/test_jump into the appropriate MD tests to use on
468 the conditionally executed code. */
470 true_expr = test_expr;
472 false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
473 if (false_code != UNKNOWN)
474 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
475 XEXP (true_expr, 0), XEXP (true_expr, 1));
476 else
477 false_expr = NULL_RTX;
479 #ifdef IFCVT_MODIFY_TESTS
480 /* If the machine description needs to modify the tests, such as setting a
481 conditional execution register from a comparison, it can do so here. */
482 IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
484 /* See if the conversion failed. */
485 if (!true_expr || !false_expr)
486 goto fail;
487 #endif
489 true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
490 if (true_prob_val)
492 true_prob_val = XEXP (true_prob_val, 0);
493 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
495 else
496 false_prob_val = NULL_RTX;
498 /* If we have && or || tests, do them here. These tests are in the adjacent
499 blocks after the first block containing the test. */
500 if (ce_info->num_multiple_test_blocks > 0)
502 basic_block bb = test_bb;
503 basic_block last_test_bb = ce_info->last_test_bb;
505 if (! false_expr)
506 goto fail;
510 rtx start, end;
511 rtx t, f;
512 enum rtx_code f_code;
514 bb = block_fallthru (bb);
515 start = first_active_insn (bb);
516 end = last_active_insn (bb, TRUE);
517 if (start
518 && ! cond_exec_process_insns (ce_info, start, end, false_expr,
519 false_prob_val, FALSE))
520 goto fail;
522 /* If the conditional jump is more than just a conditional jump, then
523 we can not do conditional execution conversion on this block. */
524 if (! onlyjump_p (BB_END (bb)))
525 goto fail;
527 /* Find the conditional jump and isolate the test. */
528 t = cond_exec_get_condition (BB_END (bb));
529 if (! t)
530 goto fail;
532 f_code = reversed_comparison_code (t, BB_END (bb));
533 if (f_code == UNKNOWN)
534 goto fail;
536 f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
537 if (ce_info->and_and_p)
539 t = gen_rtx_AND (GET_MODE (t), true_expr, t);
540 f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
542 else
544 t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
545 f = gen_rtx_AND (GET_MODE (t), false_expr, f);
548 /* If the machine description needs to modify the tests, such as
549 setting a conditional execution register from a comparison, it can
550 do so here. */
551 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
552 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
554 /* See if the conversion failed. */
555 if (!t || !f)
556 goto fail;
557 #endif
559 true_expr = t;
560 false_expr = f;
562 while (bb != last_test_bb);
565 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
566 in the THEN block. */
567 then_mod_ok = (else_bb == NULL_BLOCK);
569 /* Go through the THEN and ELSE blocks converting the insns if possible
570 to conditional execution. */
572 if (then_end
573 && (! false_expr
574 || ! cond_exec_process_insns (ce_info, then_start, then_end,
575 false_expr, false_prob_val,
576 then_mod_ok)))
577 goto fail;
579 if (else_bb && else_end
580 && ! cond_exec_process_insns (ce_info, else_start, else_end,
581 true_expr, true_prob_val, TRUE))
582 goto fail;
584 /* If we cannot apply the changes, fail. Do not go through the normal fail
585 processing, since apply_change_group will call cancel_changes. */
586 if (! apply_change_group ())
588 #ifdef IFCVT_MODIFY_CANCEL
589 /* Cancel any machine dependent changes. */
590 IFCVT_MODIFY_CANCEL (ce_info);
591 #endif
592 return FALSE;
595 #ifdef IFCVT_MODIFY_FINAL
596 /* Do any machine dependent final modifications. */
597 IFCVT_MODIFY_FINAL (ce_info);
598 #endif
600 /* Conversion succeeded. */
601 if (dump_file)
602 fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
603 n_insns, (n_insns == 1) ? " was" : "s were");
605 /* Merge the blocks! */
606 merge_if_block (ce_info);
607 cond_exec_changed_p = TRUE;
608 return TRUE;
610 fail:
611 #ifdef IFCVT_MODIFY_CANCEL
612 /* Cancel any machine dependent changes. */
613 IFCVT_MODIFY_CANCEL (ce_info);
614 #endif
616 cancel_changes (0);
617 return FALSE;
620 /* Used by noce_process_if_block to communicate with its subroutines.
622 The subroutines know that A and B may be evaluated freely. They
623 know that X is a register. They should insert new instructions
624 before cond_earliest. */
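/* In the canonical shape "if (test) x = a; else x = b;", INSN_A and INSN_B
   are the insns that set X in the THEN and ELSE blocks (INSN_B may instead
   come from above COND_EARLIEST for the "x = b; if (test) x = a;" form, or
   be null), and JUMP/COND/COND_EARLIEST describe the conditional branch
   that guards them; see noce_process_if_block.  */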
626 struct noce_if_info
628 basic_block test_bb;
629 rtx insn_a, insn_b;
630 rtx x, a, b;
631 rtx jump, cond, cond_earliest;
632 /* True if "b" was originally evaluated unconditionally. */
633 bool b_unconditional;
636 static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
637 static int noce_try_move (struct noce_if_info *);
638 static int noce_try_store_flag (struct noce_if_info *);
639 static int noce_try_addcc (struct noce_if_info *);
640 static int noce_try_store_flag_constants (struct noce_if_info *);
641 static int noce_try_store_flag_mask (struct noce_if_info *);
642 static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
643 rtx, rtx, rtx);
644 static int noce_try_cmove (struct noce_if_info *);
645 static int noce_try_cmove_arith (struct noce_if_info *);
646 static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
647 static int noce_try_minmax (struct noce_if_info *);
648 static int noce_try_abs (struct noce_if_info *);
649 static int noce_try_sign_mask (struct noce_if_info *);
651 /* Helper function for noce_try_store_flag*. */
653 static rtx
654 noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
655 int normalize)
657 rtx cond = if_info->cond;
658 int cond_complex;
659 enum rtx_code code;
661 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
662 || ! general_operand (XEXP (cond, 1), VOIDmode));
664 /* If earliest == jump, or when the condition is complex, try to
665 build the store_flag insn directly. */
667 if (cond_complex)
668 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
670 if (reversep)
671 code = reversed_comparison_code (cond, if_info->jump);
672 else
673 code = GET_CODE (cond);
675 if ((if_info->cond_earliest == if_info->jump || cond_complex)
676 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
678 rtx tmp;
680 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
681 XEXP (cond, 1));
682 tmp = gen_rtx_SET (VOIDmode, x, tmp);
684 start_sequence ();
685 tmp = emit_insn (tmp);
687 if (recog_memoized (tmp) >= 0)
689 tmp = get_insns ();
690 end_sequence ();
691 emit_insn (tmp);
693 if_info->cond_earliest = if_info->jump;
695 return x;
698 end_sequence ();
701 /* Don't even try if the comparison operands or the mode of X are weird. */
702 if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
703 return NULL_RTX;
705 return emit_store_flag (x, code, XEXP (cond, 0),
706 XEXP (cond, 1), VOIDmode,
707 (code == LTU || code == LEU
708 || code == GEU || code == GTU), normalize);
711 /* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
712 X is the destination/target and Y is the value to copy. */
714 static void
715 noce_emit_move_insn (rtx x, rtx y)
717 enum machine_mode outmode, inmode;
718 rtx outer, inner;
719 int bitpos;
721 if (GET_CODE (x) != STRICT_LOW_PART)
723 emit_move_insn (x, y);
724 return;
727 outer = XEXP (x, 0);
728 inner = XEXP (outer, 0);
729 outmode = GET_MODE (outer);
730 inmode = GET_MODE (inner);
731 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
732 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y);
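/* For example, when X has the form (strict_low_part (subreg:HI (reg:SI r) 0)),
   the store_bit_field call above writes Y only into the 16-bit field of r
   selected by the subreg and leaves the remaining bits of r unchanged,
   which is what STRICT_LOW_PART requires.  */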
735 /* Return sequence of instructions generated by if conversion. This
736 function calls end_sequence() to end the current stream, ensures
737 that all instructions are unshared, recognizable non-jump insns.
738 On failure, this function returns NULL_RTX. */
740 static rtx
741 end_ifcvt_sequence (struct noce_if_info *if_info)
743 rtx insn;
744 rtx seq = get_insns ();
746 set_used_flags (if_info->x);
747 set_used_flags (if_info->cond);
748 unshare_all_rtl_in_chain (seq);
749 end_sequence ();
751 /* Make sure that all of the instructions emitted are recognizable,
752 and that we haven't introduced a new jump instruction.
753 As an exercise for the reader, build a general mechanism that
754 allows proper placement of required clobbers. */
755 for (insn = seq; insn; insn = NEXT_INSN (insn))
756 if (JUMP_P (insn)
757 || recog_memoized (insn) == -1)
758 return NULL_RTX;
760 return seq;
763 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
764 "if (a == b) x = a; else x = b" into "x = b". */
766 static int
767 noce_try_move (struct noce_if_info *if_info)
769 rtx cond = if_info->cond;
770 enum rtx_code code = GET_CODE (cond);
771 rtx y, seq;
773 if (code != NE && code != EQ)
774 return FALSE;
776 /* This optimization isn't valid if either A or B could be a NaN
777 or a signed zero. */
778 if (HONOR_NANS (GET_MODE (if_info->x))
779 || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
780 return FALSE;
782 /* Check whether the operands of the comparison are A and B, in
783 either order. */
784 if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
785 && rtx_equal_p (if_info->b, XEXP (cond, 1)))
786 || (rtx_equal_p (if_info->a, XEXP (cond, 1))
787 && rtx_equal_p (if_info->b, XEXP (cond, 0))))
789 y = (code == EQ) ? if_info->a : if_info->b;
791 /* Avoid generating the move if the source is the destination. */
792 if (! rtx_equal_p (if_info->x, y))
794 start_sequence ();
795 noce_emit_move_insn (if_info->x, y);
796 seq = end_ifcvt_sequence (if_info);
797 if (!seq)
798 return FALSE;
800 emit_insn_before_setloc (seq, if_info->jump,
801 INSN_LOCATOR (if_info->insn_a));
803 return TRUE;
805 return FALSE;
808 /* Convert "if (test) x = 1; else x = 0".
810 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
811 tried in noce_try_store_flag_constants after noce_try_cmove has had
812 a go at the conversion. */
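/* For example, on a target whose STORE_FLAG_VALUE is 1, the source form
   "if (a < b) x = 1; else x = 0;" can be realized as a single store-flag
   style computation of "x = (a < b)", with no branch at all.  */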
814 static int
815 noce_try_store_flag (struct noce_if_info *if_info)
817 int reversep;
818 rtx target, seq;
820 if (GET_CODE (if_info->b) == CONST_INT
821 && INTVAL (if_info->b) == STORE_FLAG_VALUE
822 && if_info->a == const0_rtx)
823 reversep = 0;
824 else if (if_info->b == const0_rtx
825 && GET_CODE (if_info->a) == CONST_INT
826 && INTVAL (if_info->a) == STORE_FLAG_VALUE
827 && (reversed_comparison_code (if_info->cond, if_info->jump)
828 != UNKNOWN))
829 reversep = 1;
830 else
831 return FALSE;
833 start_sequence ();
835 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
836 if (target)
838 if (target != if_info->x)
839 noce_emit_move_insn (if_info->x, target);
841 seq = end_ifcvt_sequence (if_info);
842 if (! seq)
843 return FALSE;
845 emit_insn_before_setloc (seq, if_info->jump,
846 INSN_LOCATOR (if_info->insn_a));
847 return TRUE;
849 else
851 end_sequence ();
852 return FALSE;
856 /* Convert "if (test) x = a; else x = b", for A and B constant. */
858 static int
859 noce_try_store_flag_constants (struct noce_if_info *if_info)
861 rtx target, seq;
862 int reversep;
863 HOST_WIDE_INT itrue, ifalse, diff, tmp;
864 int normalize, can_reverse;
865 enum machine_mode mode;
867 if (! no_new_pseudos
868 && GET_CODE (if_info->a) == CONST_INT
869 && GET_CODE (if_info->b) == CONST_INT)
871 mode = GET_MODE (if_info->x);
872 ifalse = INTVAL (if_info->a);
873 itrue = INTVAL (if_info->b);
875 /* Make sure we can represent the difference between the two values. */
876 if ((itrue - ifalse > 0)
877 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
878 return FALSE;
880 diff = trunc_int_for_mode (itrue - ifalse, mode);
882 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
883 != UNKNOWN);
885 reversep = 0;
886 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
887 normalize = 0;
888 else if (ifalse == 0 && exact_log2 (itrue) >= 0
889 && (STORE_FLAG_VALUE == 1
890 || BRANCH_COST >= 2))
891 normalize = 1;
892 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
893 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
894 normalize = 1, reversep = 1;
895 else if (itrue == -1
896 && (STORE_FLAG_VALUE == -1
897 || BRANCH_COST >= 2))
898 normalize = -1;
899 else if (ifalse == -1 && can_reverse
900 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
901 normalize = -1, reversep = 1;
902 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
903 || BRANCH_COST >= 3)
904 normalize = -1;
905 else
906 return FALSE;
908 if (reversep)
910 tmp = itrue; itrue = ifalse; ifalse = tmp;
911 diff = trunc_int_for_mode (-diff, mode);
914 start_sequence ();
915 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
916 if (! target)
918 end_sequence ();
919 return FALSE;
922 /* if (test) x = 3; else x = 4;
923 => x = 3 + (test == 0); */
924 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
926 target = expand_simple_binop (mode,
927 (diff == STORE_FLAG_VALUE
928 ? PLUS : MINUS),
929 GEN_INT (ifalse), target, if_info->x, 0,
930 OPTAB_WIDEN);
933 /* if (test) x = 8; else x = 0;
934 => x = (test != 0) << 3; */
935 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
937 target = expand_simple_binop (mode, ASHIFT,
938 target, GEN_INT (tmp), if_info->x, 0,
939 OPTAB_WIDEN);
942 /* if (test) x = -1; else x = b;
943 => x = -(test != 0) | b; */
944 else if (itrue == -1)
946 target = expand_simple_binop (mode, IOR,
947 target, GEN_INT (ifalse), if_info->x, 0,
948 OPTAB_WIDEN);
951 /* if (test) x = a; else x = b;
952 => x = (-(test != 0) & (b - a)) + a; */
953 else
955 target = expand_simple_binop (mode, AND,
956 target, GEN_INT (diff), if_info->x, 0,
957 OPTAB_WIDEN);
958 if (target)
959 target = expand_simple_binop (mode, PLUS,
960 target, GEN_INT (ifalse),
961 if_info->x, 0, OPTAB_WIDEN);
964 if (! target)
966 end_sequence ();
967 return FALSE;
970 if (target != if_info->x)
971 noce_emit_move_insn (if_info->x, target);
973 seq = end_ifcvt_sequence (if_info);
974 if (!seq)
975 return FALSE;
977 emit_insn_before_setloc (seq, if_info->jump,
978 INSN_LOCATOR (if_info->insn_a));
979 return TRUE;
982 return FALSE;
985 /* Convert "if (test) foo++" into "foo += (test != 0)", and
986 similarly for "foo--". */
988 static int
989 noce_try_addcc (struct noce_if_info *if_info)
991 rtx target, seq;
992 int subtract, normalize;
994 if (! no_new_pseudos
995 && GET_CODE (if_info->a) == PLUS
996 && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
997 && (reversed_comparison_code (if_info->cond, if_info->jump)
998 != UNKNOWN))
1000 rtx cond = if_info->cond;
1001 enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
1003 /* First try to use addcc pattern. */
1004 if (general_operand (XEXP (cond, 0), VOIDmode)
1005 && general_operand (XEXP (cond, 1), VOIDmode))
1007 start_sequence ();
1008 target = emit_conditional_add (if_info->x, code,
1009 XEXP (cond, 0),
1010 XEXP (cond, 1),
1011 VOIDmode,
1012 if_info->b,
1013 XEXP (if_info->a, 1),
1014 GET_MODE (if_info->x),
1015 (code == LTU || code == GEU
1016 || code == LEU || code == GTU));
1017 if (target)
1019 if (target != if_info->x)
1020 noce_emit_move_insn (if_info->x, target);
1022 seq = end_ifcvt_sequence (if_info);
1023 if (!seq)
1024 return FALSE;
1026 emit_insn_before_setloc (seq, if_info->jump,
1027 INSN_LOCATOR (if_info->insn_a));
1028 return TRUE;
1030 end_sequence ();
1033 /* If that fails, construct conditional increment or decrement using
1034 setcc. */
1035 if (BRANCH_COST >= 2
1036 && (XEXP (if_info->a, 1) == const1_rtx
1037 || XEXP (if_info->a, 1) == constm1_rtx))
1039 start_sequence ();
1040 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1041 subtract = 0, normalize = 0;
1042 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1043 subtract = 1, normalize = 0;
1044 else
1045 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
1048 target = noce_emit_store_flag (if_info,
1049 gen_reg_rtx (GET_MODE (if_info->x)),
1050 1, normalize);
1052 if (target)
1053 target = expand_simple_binop (GET_MODE (if_info->x),
1054 subtract ? MINUS : PLUS,
1055 if_info->b, target, if_info->x,
1056 0, OPTAB_WIDEN);
1057 if (target)
1059 if (target != if_info->x)
1060 noce_emit_move_insn (if_info->x, target);
1062 seq = end_ifcvt_sequence (if_info);
1063 if (!seq)
1064 return FALSE;
1066 emit_insn_before_setloc (seq, if_info->jump,
1067 INSN_LOCATOR (if_info->insn_a));
1068 return TRUE;
1070 end_sequence ();
1074 return FALSE;
1077 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
1079 static int
1080 noce_try_store_flag_mask (struct noce_if_info *if_info)
1082 rtx target, seq;
1083 int reversep;
1085 reversep = 0;
1086 if (! no_new_pseudos
1087 && (BRANCH_COST >= 2
1088 || STORE_FLAG_VALUE == -1)
1089 && ((if_info->a == const0_rtx
1090 && rtx_equal_p (if_info->b, if_info->x))
1091 || ((reversep = (reversed_comparison_code (if_info->cond,
1092 if_info->jump)
1093 != UNKNOWN))
1094 && if_info->b == const0_rtx
1095 && rtx_equal_p (if_info->a, if_info->x))))
1097 start_sequence ();
1098 target = noce_emit_store_flag (if_info,
1099 gen_reg_rtx (GET_MODE (if_info->x)),
1100 reversep, -1);
1101 if (target)
1102 target = expand_simple_binop (GET_MODE (if_info->x), AND,
1103 if_info->x,
1104 target, if_info->x, 0,
1105 OPTAB_WIDEN);
1107 if (target)
1109 if (target != if_info->x)
1110 noce_emit_move_insn (if_info->x, target);
1112 seq = end_ifcvt_sequence (if_info);
1113 if (!seq)
1114 return FALSE;
1116 emit_insn_before_setloc (seq, if_info->jump,
1117 INSN_LOCATOR (if_info->insn_a));
1118 return TRUE;
1121 end_sequence ();
1124 return FALSE;
1127 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
1129 static rtx
1130 noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
1131 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
1133 /* If earliest == jump, try to build the cmove insn directly.
1134 This is helpful when combine has created some complex condition
1135 (like for alpha's cmovlbs) that we can't hope to regenerate
1136 through the normal interface. */
1138 if (if_info->cond_earliest == if_info->jump)
1140 rtx tmp;
1142 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1143 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
1144 tmp = gen_rtx_SET (VOIDmode, x, tmp);
1146 start_sequence ();
1147 tmp = emit_insn (tmp);
1149 if (recog_memoized (tmp) >= 0)
1151 tmp = get_insns ();
1152 end_sequence ();
1153 emit_insn (tmp);
1155 return x;
1158 end_sequence ();
1161 /* Don't even try if the comparison operands are weird. */
1162 if (! general_operand (cmp_a, GET_MODE (cmp_a))
1163 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1164 return NULL_RTX;
1166 #if HAVE_conditional_move
1167 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1168 vtrue, vfalse, GET_MODE (x),
1169 (code == LTU || code == GEU
1170 || code == LEU || code == GTU));
1171 #else
1172 /* We'll never get here, as noce_process_if_block doesn't call the
1173 functions involved. Ifdef code, however, should be discouraged
1174 because it leads to typos in the code not selected. In any case,
1175 emit_conditional_move won't exist either. */
1176 return NULL_RTX;
1177 #endif
1180 /* Try only simple constants and registers here. More complex cases
1181 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1182 has had a go at it. */
1184 static int
1185 noce_try_cmove (struct noce_if_info *if_info)
1187 enum rtx_code code;
1188 rtx target, seq;
1190 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1191 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1193 start_sequence ();
1195 code = GET_CODE (if_info->cond);
1196 target = noce_emit_cmove (if_info, if_info->x, code,
1197 XEXP (if_info->cond, 0),
1198 XEXP (if_info->cond, 1),
1199 if_info->a, if_info->b);
1201 if (target)
1203 if (target != if_info->x)
1204 noce_emit_move_insn (if_info->x, target);
1206 seq = end_ifcvt_sequence (if_info);
1207 if (!seq)
1208 return FALSE;
1210 emit_insn_before_setloc (seq, if_info->jump,
1211 INSN_LOCATOR (if_info->insn_a));
1212 return TRUE;
1214 else
1216 end_sequence ();
1217 return FALSE;
1221 return FALSE;
1224 /* Try more complex cases involving conditional_move. */
1226 static int
1227 noce_try_cmove_arith (struct noce_if_info *if_info)
1229 rtx a = if_info->a;
1230 rtx b = if_info->b;
1231 rtx x = if_info->x;
1232 rtx orig_a, orig_b;
1233 rtx insn_a, insn_b;
1234 rtx tmp, target;
1235 int is_mem = 0;
1236 int insn_cost;
1237 enum rtx_code code;
1239 /* A conditional move from two memory sources is equivalent to a
1240 conditional on their addresses followed by a load. Don't do this
1241 early because it'll screw alias analysis. Note that we've
1242 already checked for no side effects. */
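/* Concretely, "if (test) x = *p; else x = *q;" is rewritten along the lines
   of "tmp = test ? p : q; x = *tmp;", leaving a single unconditional
   load.  */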
1243 if (! no_new_pseudos && cse_not_expected
1244 && MEM_P (a) && MEM_P (b)
1245 && BRANCH_COST >= 5)
1247 a = XEXP (a, 0);
1248 b = XEXP (b, 0);
1249 x = gen_reg_rtx (Pmode);
1250 is_mem = 1;
1253 /* ??? We could handle this if we knew that a load from A or B could
1254 not fault. This is also true if we've already loaded
1255 from the address along the path from ENTRY. */
1256 else if (may_trap_p (a) || may_trap_p (b))
1257 return FALSE;
1259 /* if (test) x = a + b; else x = c - d;
1260 => y = a + b;
1261 x = c - d;
1262 if (test)
1263 x = y;
1266 code = GET_CODE (if_info->cond);
1267 insn_a = if_info->insn_a;
1268 insn_b = if_info->insn_b;
1270 /* Total insn_rtx_cost should be smaller than branch cost. Exit
1271 if insn_rtx_cost can't be estimated. */
1272 if (insn_a)
1274 insn_cost = insn_rtx_cost (PATTERN (insn_a));
1275 if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (BRANCH_COST))
1276 return FALSE;
1278 else
1280 insn_cost = 0;
1283 if (insn_b) {
1284 insn_cost += insn_rtx_cost (PATTERN (insn_b));
1285 if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (BRANCH_COST))
1286 return FALSE;
1289 /* Possibly rearrange operands to make things come out more natural. */
1290 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1292 int reversep = 0;
1293 if (rtx_equal_p (b, x))
1294 reversep = 1;
1295 else if (general_operand (b, GET_MODE (b)))
1296 reversep = 1;
1298 if (reversep)
1300 code = reversed_comparison_code (if_info->cond, if_info->jump);
1301 tmp = a, a = b, b = tmp;
1302 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1306 start_sequence ();
1308 orig_a = a;
1309 orig_b = b;
1311 /* If either operand is complex, load it into a register first.
1312 The best way to do this is to copy the original insn. In this
1313 way we preserve any clobbers etc that the insn may have had.
1314 This is of course not possible in the IS_MEM case. */
1315 if (! general_operand (a, GET_MODE (a)))
1317 rtx set;
1319 if (no_new_pseudos)
1320 goto end_seq_and_fail;
1322 if (is_mem)
1324 tmp = gen_reg_rtx (GET_MODE (a));
1325 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1327 else if (! insn_a)
1328 goto end_seq_and_fail;
1329 else
1331 a = gen_reg_rtx (GET_MODE (a));
1332 tmp = copy_rtx (insn_a);
1333 set = single_set (tmp);
1334 SET_DEST (set) = a;
1335 tmp = emit_insn (PATTERN (tmp));
1337 if (recog_memoized (tmp) < 0)
1338 goto end_seq_and_fail;
1340 if (! general_operand (b, GET_MODE (b)))
1342 rtx set, last;
1344 if (no_new_pseudos)
1345 goto end_seq_and_fail;
1347 if (is_mem)
1349 tmp = gen_reg_rtx (GET_MODE (b));
1350 tmp = gen_rtx_SET (VOIDmode, tmp, b);
1352 else if (! insn_b)
1353 goto end_seq_and_fail;
1354 else
1356 b = gen_reg_rtx (GET_MODE (b));
1357 tmp = copy_rtx (insn_b);
1358 set = single_set (tmp);
1359 SET_DEST (set) = b;
1360 tmp = PATTERN (tmp);
1363 /* If insn to set up A clobbers any registers B depends on, try to
1364 swap insn that sets up A with the one that sets up B. If even
1365 that doesn't help, punt. */
1366 last = get_last_insn ();
1367 if (last && modified_in_p (orig_b, last))
1369 tmp = emit_insn_before (tmp, get_insns ());
1370 if (modified_in_p (orig_a, tmp))
1371 goto end_seq_and_fail;
1373 else
1374 tmp = emit_insn (tmp);
1376 if (recog_memoized (tmp) < 0)
1377 goto end_seq_and_fail;
1380 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1381 XEXP (if_info->cond, 1), a, b);
1383 if (! target)
1384 goto end_seq_and_fail;
1386 /* If we're handling the memory case from above, emit the load now. */
1387 if (is_mem)
1389 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1391 /* Copy over flags as appropriate. */
1392 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1393 MEM_VOLATILE_P (tmp) = 1;
1394 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1395 MEM_IN_STRUCT_P (tmp) = 1;
1396 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1397 MEM_SCALAR_P (tmp) = 1;
1398 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1399 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1400 set_mem_align (tmp,
1401 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1403 noce_emit_move_insn (if_info->x, tmp);
1405 else if (target != x)
1406 noce_emit_move_insn (x, target);
1408 tmp = end_ifcvt_sequence (if_info);
1409 if (!tmp)
1410 return FALSE;
1412 emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1413 return TRUE;
1415 end_seq_and_fail:
1416 end_sequence ();
1417 return FALSE;
1420 /* For most cases, the simplified condition we found is the best
1421 choice, but this is not the case for the min/max/abs transforms.
1422 For these we wish to know that it is A or B in the condition. */
1424 static rtx
1425 noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
1426 rtx *earliest)
1428 rtx cond, set, insn;
1429 int reverse;
1431 /* If target is already mentioned in the known condition, return it. */
1432 if (reg_mentioned_p (target, if_info->cond))
1434 *earliest = if_info->cond_earliest;
1435 return if_info->cond;
1438 set = pc_set (if_info->jump);
1439 cond = XEXP (SET_SRC (set), 0);
1440 reverse
1441 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1442 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1444 /* If we're looking for a constant, try to make the conditional
1445 have that constant in it. There are two reasons why it may
1446 not have the constant we want:
1448 1. GCC may have needed to put the constant in a register, because
1449 the target can't compare directly against that constant. For
1450 this case, we look for a SET immediately before the comparison
1451 that puts a constant in that register.
1453 2. GCC may have canonicalized the conditional, for example
1454 replacing "if x < 4" with "if x <= 3". We can undo that (or
1455 make equivalent types of changes) to get the constants we need
1456 if they're off by one in the right direction. */
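/* For instance, if the constant we want is 4 but the condition reads
   (le x 3), the LE case below rewrites it as (lt x 4); the two tests are
   equivalent for integers, yet the second one mentions the constant we
   are looking for.  */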
1458 if (GET_CODE (target) == CONST_INT)
1460 enum rtx_code code = GET_CODE (if_info->cond);
1461 rtx op_a = XEXP (if_info->cond, 0);
1462 rtx op_b = XEXP (if_info->cond, 1);
1463 rtx prev_insn;
1465 /* First, look to see if we put a constant in a register. */
1466 prev_insn = PREV_INSN (if_info->cond_earliest);
1467 if (prev_insn
1468 && INSN_P (prev_insn)
1469 && GET_CODE (PATTERN (prev_insn)) == SET)
1471 rtx src = find_reg_equal_equiv_note (prev_insn);
1472 if (!src)
1473 src = SET_SRC (PATTERN (prev_insn));
1474 if (GET_CODE (src) == CONST_INT)
1476 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1477 op_a = src;
1478 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1479 op_b = src;
1481 if (GET_CODE (op_a) == CONST_INT)
1483 rtx tmp = op_a;
1484 op_a = op_b;
1485 op_b = tmp;
1486 code = swap_condition (code);
1491 /* Now, look to see if we can get the right constant by
1492 adjusting the conditional. */
1493 if (GET_CODE (op_b) == CONST_INT)
1495 HOST_WIDE_INT desired_val = INTVAL (target);
1496 HOST_WIDE_INT actual_val = INTVAL (op_b);
1498 switch (code)
1500 case LT:
1501 if (actual_val == desired_val + 1)
1503 code = LE;
1504 op_b = GEN_INT (desired_val);
1506 break;
1507 case LE:
1508 if (actual_val == desired_val - 1)
1510 code = LT;
1511 op_b = GEN_INT (desired_val);
1513 break;
1514 case GT:
1515 if (actual_val == desired_val - 1)
1517 code = GE;
1518 op_b = GEN_INT (desired_val);
1520 break;
1521 case GE:
1522 if (actual_val == desired_val + 1)
1524 code = GT;
1525 op_b = GEN_INT (desired_val);
1527 break;
1528 default:
1529 break;
1533 /* If we made any changes, generate a new conditional that is
1534 equivalent to what we started with, but has the right
1535 constants in it. */
1536 if (code != GET_CODE (if_info->cond)
1537 || op_a != XEXP (if_info->cond, 0)
1538 || op_b != XEXP (if_info->cond, 1))
1540 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1541 *earliest = if_info->cond_earliest;
1542 return cond;
1546 cond = canonicalize_condition (if_info->jump, cond, reverse,
1547 earliest, target, false, true);
1548 if (! cond || ! reg_mentioned_p (target, cond))
1549 return NULL;
1551 /* We almost certainly searched back to a different place.
1552 Need to re-verify correct lifetimes. */
1554 /* X may not be mentioned in the range (cond_earliest, jump]. */
1555 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1556 if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
1557 return NULL;
1559 /* A and B may not be modified in the range [cond_earliest, jump). */
1560 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1561 if (INSN_P (insn)
1562 && (modified_in_p (if_info->a, insn)
1563 || modified_in_p (if_info->b, insn)))
1564 return NULL;
1566 return cond;
1569 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1571 static int
1572 noce_try_minmax (struct noce_if_info *if_info)
1574 rtx cond, earliest, target, seq;
1575 enum rtx_code code, op;
1576 int unsignedp;
1578 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1579 if (no_new_pseudos)
1580 return FALSE;
1582 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1583 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1584 to get the target to tell us... */
1585 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1586 || HONOR_NANS (GET_MODE (if_info->x)))
1587 return FALSE;
1589 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1590 if (!cond)
1591 return FALSE;
1593 /* Verify the condition is of the form we expect, and canonicalize
1594 the comparison code. */
1595 code = GET_CODE (cond);
1596 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1598 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1599 return FALSE;
1601 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1603 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1604 return FALSE;
1605 code = swap_condition (code);
1607 else
1608 return FALSE;
1610 /* Determine what sort of operation this is. Note that the code is for
1611 a taken branch, so the code->operation mapping appears backwards. */
1612 switch (code)
1614 case LT:
1615 case LE:
1616 case UNLT:
1617 case UNLE:
1618 op = SMAX;
1619 unsignedp = 0;
1620 break;
1621 case GT:
1622 case GE:
1623 case UNGT:
1624 case UNGE:
1625 op = SMIN;
1626 unsignedp = 0;
1627 break;
1628 case LTU:
1629 case LEU:
1630 op = UMAX;
1631 unsignedp = 1;
1632 break;
1633 case GTU:
1634 case GEU:
1635 op = UMIN;
1636 unsignedp = 1;
1637 break;
1638 default:
1639 return FALSE;
1642 start_sequence ();
1644 target = expand_simple_binop (GET_MODE (if_info->x), op,
1645 if_info->a, if_info->b,
1646 if_info->x, unsignedp, OPTAB_WIDEN);
1647 if (! target)
1649 end_sequence ();
1650 return FALSE;
1652 if (target != if_info->x)
1653 noce_emit_move_insn (if_info->x, target);
1655 seq = end_ifcvt_sequence (if_info);
1656 if (!seq)
1657 return FALSE;
1659 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1660 if_info->cond = cond;
1661 if_info->cond_earliest = earliest;
1663 return TRUE;
1666 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1668 static int
1669 noce_try_abs (struct noce_if_info *if_info)
1671 rtx cond, earliest, target, seq, a, b, c;
1672 int negate;
1674 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1675 if (no_new_pseudos)
1676 return FALSE;
1678 /* Recognize A and B as constituting an ABS or NABS. */
1679 a = if_info->a;
1680 b = if_info->b;
1681 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1682 negate = 0;
1683 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1685 c = a; a = b; b = c;
1686 negate = 1;
1688 else
1689 return FALSE;
1691 cond = noce_get_alt_condition (if_info, b, &earliest);
1692 if (!cond)
1693 return FALSE;
1695 /* Verify the condition is of the form we expect. */
1696 if (rtx_equal_p (XEXP (cond, 0), b))
1697 c = XEXP (cond, 1);
1698 else if (rtx_equal_p (XEXP (cond, 1), b))
1699 c = XEXP (cond, 0);
1700 else
1701 return FALSE;
1703 /* Verify that C is zero. Search backward through the block for
1704 a REG_EQUAL note if necessary. */
1705 if (REG_P (c))
1707 rtx insn, note = NULL;
1708 for (insn = earliest;
1709 insn != BB_HEAD (if_info->test_bb);
1710 insn = PREV_INSN (insn))
1711 if (INSN_P (insn)
1712 && ((note = find_reg_note (insn, REG_EQUAL, c))
1713 || (note = find_reg_note (insn, REG_EQUIV, c))))
1714 break;
1715 if (! note)
1716 return FALSE;
1717 c = XEXP (note, 0);
1719 if (MEM_P (c)
1720 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1721 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1722 c = get_pool_constant (XEXP (c, 0));
1724 /* Work around funny ideas get_condition has wrt canonicalization.
1725 Note that these rtx constants are known to be CONST_INT, and
1726 therefore imply integer comparisons. */
1727 if (c == constm1_rtx && GET_CODE (cond) == GT)
1729 else if (c == const1_rtx && GET_CODE (cond) == LT)
1731 else if (c != CONST0_RTX (GET_MODE (b)))
1732 return FALSE;
1734 /* Determine what sort of operation this is. */
1735 switch (GET_CODE (cond))
1737 case LT:
1738 case LE:
1739 case UNLT:
1740 case UNLE:
1741 negate = !negate;
1742 break;
1743 case GT:
1744 case GE:
1745 case UNGT:
1746 case UNGE:
1747 break;
1748 default:
1749 return FALSE;
1752 start_sequence ();
1754 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
1756 /* ??? It's a quandary whether cmove would be better here, especially
1757 for integers. Perhaps combine will clean things up. */
1758 if (target && negate)
1759 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1761 if (! target)
1763 end_sequence ();
1764 return FALSE;
1767 if (target != if_info->x)
1768 noce_emit_move_insn (if_info->x, target);
1770 seq = end_ifcvt_sequence (if_info);
1771 if (!seq)
1772 return FALSE;
1774 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1775 if_info->cond = cond;
1776 if_info->cond_earliest = earliest;
1778 return TRUE;
1781 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;". */
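/* Here C is the sign-bit position (e.g. 31 for a 32-bit m): the arithmetic
   shift yields an all-ones mask exactly when m is negative, so ANDing it
   with b gives b or 0.  The code below obtains the mask via emit_store_flag
   rather than by shifting explicitly.  */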
1783 static int
1784 noce_try_sign_mask (struct noce_if_info *if_info)
1786 rtx cond, t, m, c, seq;
1787 enum machine_mode mode;
1788 enum rtx_code code;
1790 if (no_new_pseudos)
1791 return FALSE;
1793 cond = if_info->cond;
1794 code = GET_CODE (cond);
1795 m = XEXP (cond, 0);
1796 c = XEXP (cond, 1);
1798 t = NULL_RTX;
1799 if (if_info->a == const0_rtx)
1801 if ((code == LT && c == const0_rtx)
1802 || (code == LE && c == constm1_rtx))
1803 t = if_info->b;
1805 else if (if_info->b == const0_rtx)
1807 if ((code == GE && c == const0_rtx)
1808 || (code == GT && c == constm1_rtx))
1809 t = if_info->a;
1812 if (! t || side_effects_p (t))
1813 return FALSE;
1815 /* We currently don't handle different modes. */
1816 mode = GET_MODE (t);
1817 if (GET_MODE (m) != mode)
1818 return FALSE;
1820 /* This is only profitable if T is cheap, or T is unconditionally
1821 executed/evaluated in the original insn sequence. */
1822 if (rtx_cost (t, SET) >= COSTS_N_INSNS (2)
1823 && (!if_info->b_unconditional
1824 || t != if_info->b))
1825 return FALSE;
1827 start_sequence ();
1828 /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
1829 "(signed) m >> 31" directly. This benefits targets with specialized
1830 insns to obtain the signmask, but still uses ashr_optab otherwise. */
1831 m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
1832 t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
1833 : NULL_RTX;
1835 if (!t)
1837 end_sequence ();
1838 return FALSE;
1841 noce_emit_move_insn (if_info->x, t);
1843 seq = end_ifcvt_sequence (if_info);
1844 if (!seq)
1845 return FALSE;
1847 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1848 return TRUE;
1852 /* Similar to get_condition, only the resulting condition must be
1853 valid at JUMP, instead of at EARLIEST. */
1855 static rtx
1856 noce_get_condition (rtx jump, rtx *earliest)
1858 rtx cond, set, tmp;
1859 bool reverse;
1861 if (! any_condjump_p (jump))
1862 return NULL_RTX;
1864 set = pc_set (jump);
1866 /* If this branches to JUMP_LABEL when the condition is false,
1867 reverse the condition. */
1868 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1869 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
1871 /* If the condition variable is a register and is MODE_INT, accept it. */
1873 cond = XEXP (SET_SRC (set), 0);
1874 tmp = XEXP (cond, 0);
1875 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
1877 *earliest = jump;
1879 if (reverse)
1880 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1881 GET_MODE (cond), tmp, XEXP (cond, 1));
1882 return cond;
1885 /* Otherwise, fall back on canonicalize_condition to do the dirty
1886 work of manipulating MODE_CC values and COMPARE rtx codes. */
1887 return canonicalize_condition (jump, cond, reverse, earliest,
1888 NULL_RTX, false, true);
1891 /* Return true if OP is ok for if-then-else processing. */
1893 static int
1894 noce_operand_ok (rtx op)
1896 /* We special-case memories, so handle any of them with
1897 no address side effects. */
1898 if (MEM_P (op))
1899 return ! side_effects_p (XEXP (op, 0));
1901 if (side_effects_p (op))
1902 return FALSE;
1904 return ! may_trap_p (op);
1907 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1908 without using conditional execution. Return TRUE if we were
1909 successful at converting the block. */
1911 static int
1912 noce_process_if_block (struct ce_if_block * ce_info)
1914 basic_block test_bb = ce_info->test_bb; /* test block */
1915 basic_block then_bb = ce_info->then_bb; /* THEN */
1916 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1917 struct noce_if_info if_info;
1918 rtx insn_a, insn_b;
1919 rtx set_a, set_b;
1920 rtx orig_x, x, a, b;
1921 rtx jump, cond;
1923 /* We're looking for patterns of the form
1925 (1) if (...) x = a; else x = b;
1926 (2) x = b; if (...) x = a;
1927 (3) if (...) x = a; // as if with an initial x = x.
1929 The latter patterns require jumps to be more expensive.
1931 ??? For future expansion, look for multiple X in such patterns. */
1933 /* If test is comprised of && or || elements, don't handle it unless it is
1934 the special case of && elements without an ELSE block. */
1935 if (ce_info->num_multiple_test_blocks)
1937 if (else_bb || ! ce_info->and_and_p)
1938 return FALSE;
1940 ce_info->test_bb = test_bb = ce_info->last_test_bb;
1941 ce_info->num_multiple_test_blocks = 0;
1942 ce_info->num_and_and_blocks = 0;
1943 ce_info->num_or_or_blocks = 0;
1946 /* If this is not a standard conditional jump, we can't parse it. */
1947 jump = BB_END (test_bb);
1948 cond = noce_get_condition (jump, &if_info.cond_earliest);
1949 if (! cond)
1950 return FALSE;
1952 /* If the conditional jump is more than just a conditional
1953 jump, then we can not do if-conversion on this block. */
1954 if (! onlyjump_p (jump))
1955 return FALSE;
1957 /* We must be comparing objects whose modes imply the size. */
1958 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1959 return FALSE;
1961 /* Look for one of the potential sets. */
1962 insn_a = first_active_insn (then_bb);
1963 if (! insn_a
1964 || insn_a != last_active_insn (then_bb, FALSE)
1965 || (set_a = single_set (insn_a)) == NULL_RTX)
1966 return FALSE;
1968 x = SET_DEST (set_a);
1969 a = SET_SRC (set_a);
1971 /* Look for the other potential set. Make sure we've got equivalent
1972 destinations. */
1973 /* ??? This is overconservative. Storing to two different mems is
1974 as easy as conditionally computing the address. Storing to a
1975 single mem merely requires a scratch memory to use as one of the
1976 destination addresses; often the memory immediately below the
1977 stack pointer is available for this. */
1978 set_b = NULL_RTX;
1979 if (else_bb)
1981 insn_b = first_active_insn (else_bb);
1982 if (! insn_b
1983 || insn_b != last_active_insn (else_bb, FALSE)
1984 || (set_b = single_set (insn_b)) == NULL_RTX
1985 || ! rtx_equal_p (x, SET_DEST (set_b)))
1986 return FALSE;
1988 else
1990 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1991 /* We're going to be moving the evaluation of B down from above
1992 COND_EARLIEST to JUMP. Make sure the relevant data is still
1993 intact. */
1994 if (! insn_b
1995 || !NONJUMP_INSN_P (insn_b)
1996 || (set_b = single_set (insn_b)) == NULL_RTX
1997 || ! rtx_equal_p (x, SET_DEST (set_b))
1998 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
1999 || modified_between_p (SET_SRC (set_b),
2000 PREV_INSN (if_info.cond_earliest), jump)
2001 /* Likewise with X. In particular this can happen when
2002 noce_get_condition looks farther back in the instruction
2003 stream than one might expect. */
2004 || reg_overlap_mentioned_p (x, cond)
2005 || reg_overlap_mentioned_p (x, a)
2006 || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
2007 insn_b = set_b = NULL_RTX;
2010 /* If x has side effects then only the if-then-else form is safe to
2011 convert. But even in that case we would need to restore any notes
2012 (such as REG_INC) at the end. That can be tricky if
2013 noce_emit_move_insn expands to more than one insn, so disable the
2014 optimization entirely for now if there are side effects. */
2015 if (side_effects_p (x))
2016 return FALSE;
2018 b = (set_b ? SET_SRC (set_b) : x);
2020 /* Only operate on register destinations, and even then avoid extending
2021 the lifetime of hard registers on small register class machines. */
2022 orig_x = x;
2023 if (!REG_P (x)
2024 || (SMALL_REGISTER_CLASSES
2025 && REGNO (x) < FIRST_PSEUDO_REGISTER))
2027 if (no_new_pseudos || GET_MODE (x) == BLKmode)
2028 return FALSE;
2029 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
2030 ? XEXP (x, 0) : x));
2033 /* Don't operate on sources that may trap or are volatile. */
2034 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
2035 return FALSE;
2037 /* Set up the info block for our subroutines. */
2038 if_info.test_bb = test_bb;
2039 if_info.cond = cond;
2040 if_info.jump = jump;
2041 if_info.insn_a = insn_a;
2042 if_info.insn_b = insn_b;
2043 if_info.x = x;
2044 if_info.a = a;
2045 if_info.b = b;
2046 if_info.b_unconditional = else_bb == 0;
2048 /* Try optimizations in some approximation of a useful order. */
2049 /* ??? Should first look to see if X is live incoming at all. If it
2050 isn't, we don't need anything but an unconditional set. */
2052 /* Look and see if A and B are really the same. Avoid creating silly
2053 cmove constructs that no one will fix up later. */
2054 if (rtx_equal_p (a, b))
2056 /* If we have an INSN_B, we don't have to create any new rtl. Just
2057 move the instruction that we already have. If we don't have an
2058 INSN_B, that means that A == X, and we've got a noop move. In
2059 that case don't do anything and let the code below delete INSN_A. */
2060 if (insn_b && else_bb)
2062 rtx note;
2064 if (else_bb && insn_b == BB_END (else_bb))
2065 BB_END (else_bb) = PREV_INSN (insn_b);
2066 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
2068 /* If there was a REG_EQUAL note, delete it since it may have been
2069 true due to this insn being after a jump. */
2070 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
2071 remove_note (insn_b, note);
2073 insn_b = NULL_RTX;
2075 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
2076 x must be executed twice. */
2077 else if (insn_b && side_effects_p (orig_x))
2078 return FALSE;
2080 x = orig_x;
2081 goto success;
2084 /* Disallow the "if (...) x = a;" form (with an implicit "else x = x;")
2085 for most optimizations if writing to x may trap, i.e. it's a memory
2086 other than a static var or a stack slot. */
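  /* For illustration, given source such as

	if (test)
	  *p = a;

     the implicit "else *p = *p;" form would dereference P even when TEST
     is false, which may fault; hence only the conditional-move attempts
     below are made for such destinations.  */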
2087 if (! set_b
2088 && MEM_P (orig_x)
2089 && ! MEM_NOTRAP_P (orig_x)
2090 && rtx_addr_can_trap_p (XEXP (orig_x, 0)))
2092 if (HAVE_conditional_move)
2094 if (noce_try_cmove (&if_info))
2095 goto success;
2096 if (! HAVE_conditional_execution
2097 && noce_try_cmove_arith (&if_info))
2098 goto success;
2100 return FALSE;
2103 if (noce_try_move (&if_info))
2104 goto success;
2105 if (noce_try_store_flag (&if_info))
2106 goto success;
2107 if (noce_try_minmax (&if_info))
2108 goto success;
2109 if (noce_try_abs (&if_info))
2110 goto success;
2111 if (HAVE_conditional_move
2112 && noce_try_cmove (&if_info))
2113 goto success;
2114 if (! HAVE_conditional_execution)
2116 if (noce_try_store_flag_constants (&if_info))
2117 goto success;
2118 if (noce_try_addcc (&if_info))
2119 goto success;
2120 if (noce_try_store_flag_mask (&if_info))
2121 goto success;
2122 if (HAVE_conditional_move
2123 && noce_try_cmove_arith (&if_info))
2124 goto success;
2125 if (noce_try_sign_mask (&if_info))
2126 goto success;
2129 return FALSE;
2131 success:
2132 /* The original sets may now be killed. */
2133 delete_insn (insn_a);
2135 /* Several special cases here: First, we may have reused insn_b above,
2136 in which case insn_b is now NULL. Second, we want to delete insn_b
2137 if it came from the ELSE block, because it follows the now correct
2138 write that appears in the TEST block. However, if we got insn_b from
2139 the TEST block, it may in fact be loading data needed for the comparison.
2140 We'll let life_analysis remove the insn if it's really dead. */
2141 if (insn_b && else_bb)
2142 delete_insn (insn_b);
2144 /* The new insns will have been inserted immediately before the jump. We
2145 should be able to remove the jump with impunity, but the condition itself
2146 may have been modified by gcse to be shared across basic blocks. */
2147 delete_insn (jump);
2149 /* If we used a temporary, fix it up now. */
2150 if (orig_x != x)
2152 start_sequence ();
2153 noce_emit_move_insn (orig_x, x);
2154 insn_b = get_insns ();
2155 set_used_flags (orig_x);
2156 unshare_all_rtl_in_chain (insn_b);
2157 end_sequence ();
2159 emit_insn_after_setloc (insn_b, BB_END (test_bb), INSN_LOCATOR (insn_a));
2162 /* Merge the blocks! */
2163 merge_if_block (ce_info);
2165 return TRUE;
2168 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
2169 straight line code. Return true if successful. */
2171 static int
2172 process_if_block (struct ce_if_block * ce_info)
2174 if (! reload_completed
2175 && noce_process_if_block (ce_info))
2176 return TRUE;
2178 if (HAVE_conditional_execution && reload_completed)
2180 /* If we have && and || tests, try to first handle combining the && and
2181 || tests into the conditional code, and if that fails, go back and
2182 handle it without the && and ||, which at present handles the && case
2183 if there was no ELSE block. */
2184 if (cond_exec_process_if_block (ce_info, TRUE))
2185 return TRUE;
2187 if (ce_info->num_multiple_test_blocks)
2189 cancel_changes (0);
2191 if (cond_exec_process_if_block (ce_info, FALSE))
2192 return TRUE;
2196 return FALSE;
2199 /* Merge the blocks and mark for local life update. */
2201 static void
2202 merge_if_block (struct ce_if_block * ce_info)
2204 basic_block test_bb = ce_info->test_bb; /* last test block */
2205 basic_block then_bb = ce_info->then_bb; /* THEN */
2206 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
2207 basic_block join_bb = ce_info->join_bb; /* join block */
2208 basic_block combo_bb;
2210 /* All block merging is done into the lower block numbers. */
2212 combo_bb = test_bb;
2214 /* Merge any basic blocks to handle && and || subtests. Each of
2215 the blocks is on the fallthru path from the predecessor block. */
2216 if (ce_info->num_multiple_test_blocks > 0)
2218 basic_block bb = test_bb;
2219 basic_block last_test_bb = ce_info->last_test_bb;
2220 basic_block fallthru = block_fallthru (bb);
2224 bb = fallthru;
2225 fallthru = block_fallthru (bb);
2226 merge_blocks (combo_bb, bb);
2227 num_true_changes++;
2229 while (bb != last_test_bb);
2232 /* Merge the THEN block into the TEST block. Normally the THEN block won't have a
2233 label, but it might if there were || tests. That label's count should be
2234 zero, and it normally should be removed. */
2236 if (then_bb)
2238 if (combo_bb->global_live_at_end)
2239 COPY_REG_SET (combo_bb->global_live_at_end,
2240 then_bb->global_live_at_end);
2241 merge_blocks (combo_bb, then_bb);
2242 num_true_changes++;
2245 /* The ELSE block, if it existed, had a label. That label count
2246 will almost always be zero, but odd things can happen when labels
2247 get their addresses taken. */
2248 if (else_bb)
2250 merge_blocks (combo_bb, else_bb);
2251 num_true_changes++;
2254 /* If there was no join block reported, that means it was not adjacent
2255 to the others, and so we cannot merge them. */
2257 if (! join_bb)
2259 rtx last = BB_END (combo_bb);
2261 /* The outgoing edge for the current COMBO block should already
2262 be correct. Verify this. */
2263 if (EDGE_COUNT (combo_bb->succs) == 0)
2265 if (find_reg_note (last, REG_NORETURN, NULL))
2267 else if (NONJUMP_INSN_P (last)
2268 && GET_CODE (PATTERN (last)) == TRAP_IF
2269 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
2271 else
2272 abort ();
2275 /* There should still be something at the end of the THEN or ELSE
2276 blocks taking us to our final destination. */
2277 else if (JUMP_P (last))
2279 else if (EDGE_SUCC (combo_bb, 0)->dest == EXIT_BLOCK_PTR
2280 && CALL_P (last)
2281 && SIBLING_CALL_P (last))
2283 else if ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
2284 && can_throw_internal (last))
2286 else
2287 abort ();
2290 /* The JOIN block may have had quite a number of other predecessors too.
2291 Since we've already merged the TEST, THEN and ELSE blocks, we should
2292 have only one remaining edge from our if-then-else diamond. If there
2293 is more than one remaining edge, it must come from elsewhere. There
2294 may be zero incoming edges if the THEN block didn't actually join
2295 back up (as with a call to abort). */
2296 else if (EDGE_COUNT (join_bb->preds) < 2
2297 && join_bb != EXIT_BLOCK_PTR)
2299 /* We can merge the JOIN. */
2300 if (combo_bb->global_live_at_end)
2301 COPY_REG_SET (combo_bb->global_live_at_end,
2302 join_bb->global_live_at_end);
2304 merge_blocks (combo_bb, join_bb);
2305 num_true_changes++;
2307 else
2309 /* We cannot merge the JOIN. */
2311 /* The outgoing edge for the current COMBO block should already
2312 be correct. Verify this. */
2313 if (EDGE_COUNT (combo_bb->succs) > 1
2314 || EDGE_SUCC (combo_bb, 0)->dest != join_bb)
2315 abort ();
2317 /* Remove the jump and cruft from the end of the COMBO block. */
2318 if (join_bb != EXIT_BLOCK_PTR)
2319 tidy_fallthru_edge (EDGE_SUCC (combo_bb, 0));
2322 num_updated_if_blocks++;
2325 /* Find a block ending in a simple IF condition and try to transform it
2326 in some way. When converting a multi-block condition, put the new code
2327 in the first such block and delete the rest. Return a pointer to this
2328 first block if some transformation was done. Return NULL otherwise. */
2330 static basic_block
2331 find_if_header (basic_block test_bb, int pass)
2333 ce_if_block_t ce_info;
2334 edge then_edge;
2335 edge else_edge;
2337 /* The kind of block we're looking for has exactly two successors. */
2338 if (EDGE_COUNT (test_bb->succs) != 2)
2339 return NULL;
2341 then_edge = EDGE_SUCC (test_bb, 0);
2342 else_edge = EDGE_SUCC (test_bb, 1);
2344 /* Neither edge should be abnormal. */
2345 if ((then_edge->flags & EDGE_COMPLEX)
2346 || (else_edge->flags & EDGE_COMPLEX))
2347 return NULL;
2349 /* Nor exit the loop. */
2350 if ((then_edge->flags & EDGE_LOOP_EXIT)
2351 || (else_edge->flags & EDGE_LOOP_EXIT))
2352 return NULL;
2354 /* The THEN edge is canonically the one that falls through. */
2355 if (then_edge->flags & EDGE_FALLTHRU)
2357 else if (else_edge->flags & EDGE_FALLTHRU)
2359 edge e = else_edge;
2360 else_edge = then_edge;
2361 then_edge = e;
2363 else
2364 /* Otherwise this must be a multiway branch of some sort. */
2365 return NULL;
2367 memset (&ce_info, '\0', sizeof (ce_info));
2368 ce_info.test_bb = test_bb;
2369 ce_info.then_bb = then_edge->dest;
2370 ce_info.else_bb = else_edge->dest;
2371 ce_info.pass = pass;
2373 #ifdef IFCVT_INIT_EXTRA_FIELDS
2374 IFCVT_INIT_EXTRA_FIELDS (&ce_info);
2375 #endif
2377 if (find_if_block (&ce_info))
2378 goto success;
2380 if (HAVE_trap && HAVE_conditional_trap
2381 && find_cond_trap (test_bb, then_edge, else_edge))
2382 goto success;
2384 if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY
2385 && (! HAVE_conditional_execution || reload_completed))
2387 if (find_if_case_1 (test_bb, then_edge, else_edge))
2388 goto success;
2389 if (find_if_case_2 (test_bb, then_edge, else_edge))
2390 goto success;
2393 return NULL;
2395 success:
2396 if (dump_file)
2397 fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
2398 return ce_info.test_bb;
2401 /* Check whether a block has two edges, one falling through to the next block
2402 and the other jumping to a specific block, so that we can tell if the block
2403 is part of an && test or an || test. Return -1 if it does not; otherwise
2404 return the number of non-note, non-jump, non-USE/CLOBBER insns in the block. */
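/* For illustration, with source such as

	if (a && b)
	  x = 1;

   the block testing A jumps to the ELSE/JOIN target when A is false and
   falls through to the block testing B otherwise; that jump-plus-fallthru
   shape is what this predicate looks for.  */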
2406 static int
2407 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
2409 edge cur_edge;
2410 int fallthru_p = FALSE;
2411 int jump_p = FALSE;
2412 rtx insn;
2413 rtx end;
2414 int n_insns = 0;
2415 edge_iterator ei;
2417 if (!cur_bb || !target_bb)
2418 return -1;
2420 /* If no edges, obviously it doesn't jump or fallthru. */
2421 if (EDGE_COUNT (cur_bb->succs) == 0)
2422 return FALSE;
2424 FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
2426 if (cur_edge->flags & EDGE_COMPLEX)
2427 /* Anything complex isn't what we want. */
2428 return -1;
2430 else if (cur_edge->flags & EDGE_FALLTHRU)
2431 fallthru_p = TRUE;
2433 else if (cur_edge->dest == target_bb)
2434 jump_p = TRUE;
2436 else
2437 return -1;
2440 if ((jump_p & fallthru_p) == 0)
2441 return -1;
2443 /* Don't allow calls in the block, since this is used to group && and ||
2444 together for conditional execution support. ??? We should support
2445 conditional execution across calls for IA-64 some day, but
2446 for now it makes the code simpler. */
2447 end = BB_END (cur_bb);
2448 insn = BB_HEAD (cur_bb);
2450 while (insn != NULL_RTX)
2452 if (CALL_P (insn))
2453 return -1;
2455 if (INSN_P (insn)
2456 && !JUMP_P (insn)
2457 && GET_CODE (PATTERN (insn)) != USE
2458 && GET_CODE (PATTERN (insn)) != CLOBBER)
2459 n_insns++;
2461 if (insn == end)
2462 break;
2464 insn = NEXT_INSN (insn);
2467 return n_insns;
2470 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
2471 block. If so, we'll try to convert the insns to not require the branch.
2472 Return TRUE if we were successful at converting the block. */
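/* For illustration, the two CFG shapes accepted below are roughly

	TEST                 TEST
	|   \               /    \
	|   THEN          THEN  ELSE
	|   /               \    /
	JOIN                 JOIN

   i.e. an IF-THEN combo whose "else" target is really the join block, and
   a full IF-THEN-ELSE diamond whose arms meet in a common join block.  */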
2474 static int
2475 find_if_block (struct ce_if_block * ce_info)
2477 basic_block test_bb = ce_info->test_bb;
2478 basic_block then_bb = ce_info->then_bb;
2479 basic_block else_bb = ce_info->else_bb;
2480 basic_block join_bb = NULL_BLOCK;
2481 edge cur_edge;
2482 basic_block next;
2483 edge_iterator ei;
2485 ce_info->last_test_bb = test_bb;
2487 /* Discover if any fall through predecessors of the current test basic block
2488 were && tests (which jump to the else block) or || tests (which jump to
2489 the then block). */
2490 if (HAVE_conditional_execution && reload_completed
2491 && EDGE_COUNT (test_bb->preds) == 1
2492 && EDGE_PRED (test_bb, 0)->flags == EDGE_FALLTHRU)
2494 basic_block bb = EDGE_PRED (test_bb, 0)->src;
2495 basic_block target_bb;
2496 int max_insns = MAX_CONDITIONAL_EXECUTE;
2497 int n_insns;
2499 /* Determine if the preceding block is an && or || block. */
2500 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
2502 ce_info->and_and_p = TRUE;
2503 target_bb = else_bb;
2505 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
2507 ce_info->and_and_p = FALSE;
2508 target_bb = then_bb;
2510 else
2511 target_bb = NULL_BLOCK;
2513 if (target_bb && n_insns <= max_insns)
2515 int total_insns = 0;
2516 int blocks = 0;
2518 ce_info->last_test_bb = test_bb;
2520 /* Found at least one && or || block, look for more. */
2523 ce_info->test_bb = test_bb = bb;
2524 total_insns += n_insns;
2525 blocks++;
2527 if (EDGE_COUNT (bb->preds) != 1)
2528 break;
2530 bb = EDGE_PRED (bb, 0)->src;
2531 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
2533 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
2535 ce_info->num_multiple_test_blocks = blocks;
2536 ce_info->num_multiple_test_insns = total_insns;
2538 if (ce_info->and_and_p)
2539 ce_info->num_and_and_blocks = blocks;
2540 else
2541 ce_info->num_or_or_blocks = blocks;
2545 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
2546 other than any || blocks which jump to the THEN block. */
2547 if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
2548 return FALSE;
2550 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
2551 FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
2553 if (cur_edge->flags & EDGE_COMPLEX)
2554 return FALSE;
2557 FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
2559 if (cur_edge->flags & EDGE_COMPLEX)
2560 return FALSE;
2563 /* The THEN block of an IF-THEN combo must have zero or one successors. */
2564 if (EDGE_COUNT (then_bb->succs) > 0
2565 && (EDGE_COUNT (then_bb->succs) > 1
2566 || (EDGE_SUCC (then_bb, 0)->flags & EDGE_COMPLEX)
2567 || (flow2_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
2568 return FALSE;
2570 /* If the THEN block has no successors, conditional execution can still
2571 make a conditional call. Don't do this unless the ELSE block has
2572 only one incoming edge -- the CFG manipulation is too ugly otherwise.
2573 Check for the last insn of the THEN block being an indirect jump, which
2574 is listed as not having any successors, but confuses the rest of the CE
2575 code processing. ??? we should fix this in the future. */
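  /* For instance, "if (test) abort ();" leaves the THEN block with no
     successors because abort does not return; on a predicated target the
     call itself can be executed conditionally, so that shape is still
     accepted here, with the ELSE block serving as the join.  */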
2576 if (EDGE_COUNT (then_bb->succs) == 0)
2578 if (EDGE_COUNT (else_bb->preds) == 1)
2580 rtx last_insn = BB_END (then_bb);
2582 while (last_insn
2583 && NOTE_P (last_insn)
2584 && last_insn != BB_HEAD (then_bb))
2585 last_insn = PREV_INSN (last_insn);
2587 if (last_insn
2588 && JUMP_P (last_insn)
2589 && ! simplejump_p (last_insn))
2590 return FALSE;
2592 join_bb = else_bb;
2593 else_bb = NULL_BLOCK;
2595 else
2596 return FALSE;
2599 /* If the THEN block's successor is the other edge out of the TEST block,
2600 then we have an IF-THEN combo without an ELSE. */
2601 else if (EDGE_SUCC (then_bb, 0)->dest == else_bb)
2603 join_bb = else_bb;
2604 else_bb = NULL_BLOCK;
2607 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
2608 has exactly one predecessor and one successor, and the outgoing edge
2609 is not complex, then we have an IF-THEN-ELSE combo. */
2610 else if (EDGE_COUNT (else_bb->succs) == 1
2611 && EDGE_SUCC (then_bb, 0)->dest == EDGE_SUCC (else_bb, 0)->dest
2612 && EDGE_COUNT (else_bb->preds) == 1
2613 && ! (EDGE_SUCC (else_bb, 0)->flags & EDGE_COMPLEX)
2614 && ! (flow2_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
2615 join_bb = EDGE_SUCC (else_bb, 0)->dest;
2617 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2618 else
2619 return FALSE;
2621 num_possible_if_blocks++;
2623 if (dump_file)
2625 fprintf (dump_file,
2626 "\nIF-THEN%s block found, pass %d, start block %d "
2627 "[insn %d], then %d [%d]",
2628 (else_bb) ? "-ELSE" : "",
2629 ce_info->pass,
2630 test_bb->index,
2631 BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
2632 then_bb->index,
2633 BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
2635 if (else_bb)
2636 fprintf (dump_file, ", else %d [%d]",
2637 else_bb->index,
2638 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
2640 fprintf (dump_file, ", join %d [%d]",
2641 join_bb->index,
2642 BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
2644 if (ce_info->num_multiple_test_blocks > 0)
2645 fprintf (dump_file, ", %d %s block%s last test %d [%d]",
2646 ce_info->num_multiple_test_blocks,
2647 (ce_info->and_and_p) ? "&&" : "||",
2648 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
2649 ce_info->last_test_bb->index,
2650 ((BB_HEAD (ce_info->last_test_bb))
2651 ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
2652 : -1));
2654 fputc ('\n', dump_file);
2657 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we get the
2658 first condition for free, since we've already asserted that there's a
2659 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
2660 we checked the FALLTHRU flag, those are already adjacent to the last IF
2661 block. */
2662 /* ??? As an enhancement, move the ELSE block. Have to deal with
2663 BLOCK notes, if by no other means than aborting the merge if they
2664 exist. Sticky enough I don't want to think about it now. */
2665 next = then_bb;
2666 if (else_bb && (next = next->next_bb) != else_bb)
2667 return FALSE;
2668 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2670 if (else_bb)
2671 join_bb = NULL;
2672 else
2673 return FALSE;
2676 /* Do the real work. */
2677 ce_info->else_bb = else_bb;
2678 ce_info->join_bb = join_bb;
2680 return process_if_block (ce_info);
2683 /* Convert a branch over a trap, or a branch
2684 to a trap, into a conditional trap. */
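/* For illustration, source such as

	if (x != 0)
	  __builtin_trap ();

   compiles to a branch around (or to) a trap insn; when the target provides
   a conditional trap pattern, the branch and the trap can collapse into a
   single conditional trap guarded by the (possibly reversed) comparison.  */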
2686 static int
2687 find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
2689 basic_block then_bb = then_edge->dest;
2690 basic_block else_bb = else_edge->dest;
2691 basic_block other_bb, trap_bb;
2692 rtx trap, jump, cond, cond_earliest, seq;
2693 enum rtx_code code;
2695 /* Locate the block with the trap instruction. */
2696 /* ??? While we look for no successors, we really ought to allow
2697 EH successors. Need to fix merge_if_block for that to work. */
2698 if ((trap = block_has_only_trap (then_bb)) != NULL)
2699 trap_bb = then_bb, other_bb = else_bb;
2700 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2701 trap_bb = else_bb, other_bb = then_bb;
2702 else
2703 return FALSE;
2705 if (dump_file)
2707 fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2708 test_bb->index, trap_bb->index);
2711 /* If this is not a standard conditional jump, we can't parse it. */
2712 jump = BB_END (test_bb);
2713 cond = noce_get_condition (jump, &cond_earliest);
2714 if (! cond)
2715 return FALSE;
2717 /* If the conditional jump is more than just a conditional jump, then
2718 we cannot do if-conversion on this block. */
2719 if (! onlyjump_p (jump))
2720 return FALSE;
2722 /* We must be comparing objects whose modes imply the size. */
2723 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2724 return FALSE;
2726 /* Reverse the comparison code, if necessary. */
2727 code = GET_CODE (cond);
2728 if (then_bb == trap_bb)
2730 code = reversed_comparison_code (cond, jump);
2731 if (code == UNKNOWN)
2732 return FALSE;
2735 /* Attempt to generate the conditional trap. */
2736 seq = gen_cond_trap (code, XEXP (cond, 0),
2737 XEXP (cond, 1),
2738 TRAP_CODE (PATTERN (trap)));
2739 if (seq == NULL)
2740 return FALSE;
2742 num_true_changes++;
2744 /* Emit the new insns before cond_earliest. */
2745 emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));
2747 /* Delete the trap block if possible. */
2748 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2749 if (EDGE_COUNT (trap_bb->preds) == 0)
2750 delete_basic_block (trap_bb);
2752 /* If the non-trap block and the test are now adjacent, merge them.
2753 Otherwise we must insert a direct branch. */
2754 if (test_bb->next_bb == other_bb)
2756 struct ce_if_block new_ce_info;
2757 delete_insn (jump);
2758 memset (&new_ce_info, '\0', sizeof (new_ce_info));
2759 new_ce_info.test_bb = test_bb;
2760 new_ce_info.then_bb = NULL;
2761 new_ce_info.else_bb = NULL;
2762 new_ce_info.join_bb = other_bb;
2763 merge_if_block (&new_ce_info);
2765 else
2767 rtx lab, newjump;
2769 lab = JUMP_LABEL (jump);
2770 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2771 LABEL_NUSES (lab) += 1;
2772 JUMP_LABEL (newjump) = lab;
2773 emit_barrier_after (newjump);
2775 delete_insn (jump);
2778 return TRUE;
2781 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2782 return it. */
2784 static rtx
2785 block_has_only_trap (basic_block bb)
2787 rtx trap;
2789 /* We're not the exit block. */
2790 if (bb == EXIT_BLOCK_PTR)
2791 return NULL_RTX;
2793 /* The block must have no successors. */
2794 if (EDGE_COUNT (bb->succs) > 0)
2795 return NULL_RTX;
2797 /* The only instruction in the block must be the trap. */
2798 trap = first_active_insn (bb);
2799 if (! (trap == BB_END (bb)
2800 && GET_CODE (PATTERN (trap)) == TRAP_IF
2801 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2802 return NULL_RTX;
2804 return trap;
2807 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2808 transformable, but not necessarily the other. There need be no
2809 JOIN block.
2811 Return TRUE if we were successful at converting the block.
2813 Cases we'd like to look at:
2816 if (test) goto over; // x not live
2817 x = a;
2818 goto label;
2819 over:
2821 becomes
2823 x = a;
2824 if (! test) goto label;
2827 if (test) goto E; // x not live
2828 x = big();
2829 goto L;
2831 x = b;
2832 goto M;
2834 becomes
2836 x = b;
2837 if (test) goto M;
2838 x = big();
2839 goto L;
2841 (3) // This one's really only interesting for targets that can do
2842 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2843 // it results in multiple branches on a cache line, which often
2844 // does not sit well with predictors.
2846 if (test1) goto E; // predicted not taken
2847 x = a;
2848 if (test2) goto F;
2851 x = b;
2854 becomes
2856 x = a;
2857 if (test1) goto E;
2858 if (test2) goto F;
2860 Notes:
2862 (A) Don't do (2) if the branch is predicted against the block we're
2863 eliminating. Do it anyway if we can eliminate a branch; this requires
2864 that the sole successor of the eliminated block postdominate the other
2865 side of the if.
2867 (B) With CE, on (3) we can steal from both sides of the if, creating
2869 if (test1) x = a;
2870 if (!test1) x = b;
2871 if (test1) goto J;
2872 if (test2) goto F;
2876 Again, this is most useful if J postdominates.
2878 (C) CE substitutes for helpful life information.
2880 (D) These heuristics need a lot of work. */
2882 /* Tests for case 1 above. */
2884 static int
2885 find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
2887 basic_block then_bb = then_edge->dest;
2888 basic_block else_bb = else_edge->dest, new_bb;
2889 int then_bb_index;
2891 /* If we are partitioning hot/cold basic blocks, we don't want to
2892 mess up unconditional or indirect jumps that cross between hot
2893 and cold sections.
2895 Basic block partitioning may result in some jumps that appear to
2896 be optimizable (or blocks that appear to be mergeable), but which really
2897 must be left untouched (they are required to make it safely across
2898 partition boundaries). See the comments at the top of
2899 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2901 if (flag_reorder_blocks_and_partition
2902 && ((BB_END (then_bb)
2903 && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
2904 || (BB_END (else_bb)
2905 && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
2906 NULL_RTX))))
2907 return FALSE;
2909 /* THEN has one successor. */
2910 if (EDGE_COUNT (then_bb->succs) != 1)
2911 return FALSE;
2913 /* THEN does not fall through, but is not strange either. */
2914 if (EDGE_SUCC (then_bb, 0)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2915 return FALSE;
2917 /* THEN has one predecessor. */
2918 if (EDGE_COUNT (then_bb->preds) != 1)
2919 return FALSE;
2921 /* THEN must do something. */
2922 if (forwarder_block_p (then_bb))
2923 return FALSE;
2925 num_possible_if_blocks++;
2926 if (dump_file)
2927 fprintf (dump_file,
2928 "\nIF-CASE-1 found, start %d, then %d\n",
2929 test_bb->index, then_bb->index);
2931 /* THEN is small. */
2932 if (! cheap_bb_rtx_cost_p (then_bb, COSTS_N_INSNS (BRANCH_COST)))
2933 return FALSE;
2935 /* Registers set are dead, or are predicable. */
2936 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2937 EDGE_SUCC (then_bb, 0)->dest, 1))
2938 return FALSE;
2940 /* Conversion went ok, including moving the insns and fixing up the
2941 jump. Adjust the CFG to match. */
2943 bitmap_ior (test_bb->global_live_at_end,
2944 else_bb->global_live_at_start,
2945 then_bb->global_live_at_end);
2948 /* We can avoid creating a new basic block if then_bb is immediately
2949 followed by else_bb, i.e. deleting then_bb allows test_bb to fall
2950 thru to else_bb. */
2952 if (then_bb->next_bb == else_bb
2953 && then_bb->prev_bb == test_bb
2954 && else_bb != EXIT_BLOCK_PTR)
2956 redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
2957 new_bb = 0;
2959 else
2960 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
2961 else_bb);
2963 then_bb_index = then_bb->index;
2964 delete_basic_block (then_bb);
2966 /* Make the rest of the code believe that the newly created block is the THEN_BB
2967 block we removed. */
2968 if (new_bb)
2970 new_bb->index = then_bb_index;
2971 BASIC_BLOCK (then_bb_index) = new_bb;
2972 /* Since the fallthru edge was redirected from test_bb to new_bb,
2973 we need to ensure that new_bb is in the same partition as
2974 test_bb (you cannot fall through across section boundaries). */
2975 BB_COPY_PARTITION (new_bb, test_bb);
2977 /* We've possibly created jump to next insn, cleanup_cfg will solve that
2978 later. */
2980 num_true_changes++;
2981 num_updated_if_blocks++;
2983 return TRUE;
2986 /* Test for case 2 above. */
2988 static int
2989 find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
2991 basic_block then_bb = then_edge->dest;
2992 basic_block else_bb = else_edge->dest;
2993 edge else_succ;
2994 rtx note;
2996 /* If we are partitioning hot/cold basic blocks, we don't want to
2997 mess up unconditional or indirect jumps that cross between hot
2998 and cold sections.
3000 Basic block partitioning may result in some jumps that appear to
3001 be optimizable (or blocks that appear to be mergeable), but which really
3002 must be left untouched (they are required to make it safely across
3003 partition boundaries). See the comments at the top of
3004 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
3006 if (flag_reorder_blocks_and_partition
3007 && ((BB_END (then_bb)
3008 && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
3009 || (BB_END (else_bb)
3010 && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
3011 NULL_RTX))))
3012 return FALSE;
3014 /* ELSE has one successor. */
3015 if (EDGE_COUNT (else_bb->succs) != 1)
3016 return FALSE;
3017 else
3018 else_succ = EDGE_SUCC (else_bb, 0);
3020 /* ELSE outgoing edge is not complex. */
3021 if (else_succ->flags & EDGE_COMPLEX)
3022 return FALSE;
3024 /* ELSE has one predecessor. */
3025 if (EDGE_COUNT (else_bb->preds) != 1)
3026 return FALSE;
3028 /* THEN is not EXIT. */
3029 if (then_bb->index < 0)
3030 return FALSE;
3032 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
3033 note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
3034 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
3036 else if (else_succ->dest->index < 0
3037 || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
3038 else_succ->dest))
3040 else
3041 return FALSE;
3043 num_possible_if_blocks++;
3044 if (dump_file)
3045 fprintf (dump_file,
3046 "\nIF-CASE-2 found, start %d, else %d\n",
3047 test_bb->index, else_bb->index);
3049 /* ELSE is small. */
3050 if (! cheap_bb_rtx_cost_p (else_bb, COSTS_N_INSNS (BRANCH_COST)))
3051 return FALSE;
3053 /* Registers set are dead, or are predicable. */
3054 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
3055 return FALSE;
3057 /* Conversion went ok, including moving the insns and fixing up the
3058 jump. Adjust the CFG to match. */
3060 bitmap_ior (test_bb->global_live_at_end,
3061 then_bb->global_live_at_start,
3062 else_bb->global_live_at_end);
3064 delete_basic_block (else_bb);
3066 num_true_changes++;
3067 num_updated_if_blocks++;
3069 /* ??? We may now fallthru from one of THEN's successors into a join
3070 block. Rerun cleanup_cfg? Examine things manually? Wait? */
3072 return TRUE;
3075 /* A subroutine of dead_or_predicable called through for_each_rtx.
3076 Return 1 if a memory is found. */
3078 static int
3079 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3081 return MEM_P (*px);
3084 /* Used by the code above to perform the actual rtl transformations.
3085 Return TRUE if successful.
3087 TEST_BB is the block containing the conditional branch. MERGE_BB
3088 is the block containing the code to manipulate. NEW_DEST is the
3089 label TEST_BB should be branching to after the conversion.
3090 REVERSEP is true if the sense of the branch should be reversed. */
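/* For instance, in case (1) of the comment above, find_if_case_1 calls this
   with REVERSEP nonzero: the branch "if (test) goto over;" must become
   "if (! test) goto label;" once the body "x = a;" has been hoisted in
   front of it.  find_if_case_2 keeps the original branch sense and only
   redirects it.  */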
3092 static int
3093 dead_or_predicable (basic_block test_bb, basic_block merge_bb,
3094 basic_block other_bb, basic_block new_dest, int reversep)
3096 rtx head, end, jump, earliest = NULL_RTX, old_dest, new_label = NULL_RTX;
3098 jump = BB_END (test_bb);
3100 /* Find the extent of the real code in the merge block. */
3101 head = BB_HEAD (merge_bb);
3102 end = BB_END (merge_bb);
3104 if (LABEL_P (head))
3105 head = NEXT_INSN (head);
3106 if (NOTE_P (head))
3108 if (head == end)
3110 head = end = NULL_RTX;
3111 goto no_body;
3113 head = NEXT_INSN (head);
3116 if (JUMP_P (end))
3118 if (head == end)
3120 head = end = NULL_RTX;
3121 goto no_body;
3123 end = PREV_INSN (end);
3126 /* Disable handling dead code by conditional execution if the machine needs
3127 to do anything funny with the tests, etc. */
3128 #ifndef IFCVT_MODIFY_TESTS
3129 if (HAVE_conditional_execution)
3131 /* In the conditional execution case, we have things easy. We know
3132 the condition is reversible. We don't have to check life info
3133 because we're going to conditionally execute the code anyway.
3134 All that's left is making sure the insns involved can actually
3135 be predicated. */
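	/* For illustration, on such a target a moved body "x = a;" is
	   simply rewritten as a conditionally executed insn, conceptually

		(cond_exec (test) (set (reg x) (reg a)))

	   so no liveness information about X is required.  */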
3137 rtx cond, prob_val;
3139 cond = cond_exec_get_condition (jump);
3140 if (! cond)
3141 return FALSE;
3143 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3144 if (prob_val)
3145 prob_val = XEXP (prob_val, 0);
3147 if (reversep)
3149 enum rtx_code rev = reversed_comparison_code (cond, jump);
3150 if (rev == UNKNOWN)
3151 return FALSE;
3152 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
3153 XEXP (cond, 1));
3154 if (prob_val)
3155 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
3158 if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
3159 prob_val, 0))
3160 goto cancel;
3162 earliest = jump;
3164 else
3165 #endif
3167 /* In the non-conditional execution case, we have to verify that there
3168 are no trapping operations, no calls, no references to memory, and
3169 that any registers modified are dead at the branch site. */
3171 rtx insn, cond, prev;
3172 regset merge_set, tmp, test_live, test_set;
3173 struct propagate_block_info *pbi;
3174 unsigned i, fail = 0;
3175 bitmap_iterator bi;
3177 /* Check for no calls or trapping operations. */
3178 for (insn = head; ; insn = NEXT_INSN (insn))
3180 if (CALL_P (insn))
3181 return FALSE;
3182 if (INSN_P (insn))
3184 if (may_trap_p (PATTERN (insn)))
3185 return FALSE;
3187 /* ??? Even non-trapping memories such as stack frame
3188 references must be avoided. For stores, we collect
3189 no lifetime info; for reads, we'd have to assert
3190 true_dependence false against every store in the
3191 TEST range. */
3192 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
3193 return FALSE;
3195 if (insn == end)
3196 break;
3199 if (! any_condjump_p (jump))
3200 return FALSE;
3202 /* Find the extent of the conditional. */
3203 cond = noce_get_condition (jump, &earliest);
3204 if (! cond)
3205 return FALSE;
3207 /* Collect:
3208 MERGE_SET = set of registers set in MERGE_BB
3209 TEST_LIVE = set of registers live at EARLIEST
3210 TEST_SET = set of registers set between EARLIEST and the
3211 end of the block. */
3213 tmp = ALLOC_REG_SET (&reg_obstack);
3214 merge_set = ALLOC_REG_SET (&reg_obstack);
3215 test_live = ALLOC_REG_SET (&reg_obstack);
3216 test_set = ALLOC_REG_SET (&reg_obstack);
3218 /* ??? bb->local_set is only valid during calculate_global_regs_live,
3219 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
3220 since we've already asserted that MERGE_BB is small. */
3221 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
3223 /* For small register class machines, don't lengthen lifetimes of
3224 hard registers before reload. */
3225 if (SMALL_REGISTER_CLASSES && ! reload_completed)
3227 EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
3229 if (i < FIRST_PSEUDO_REGISTER
3230 && ! fixed_regs[i]
3231 && ! global_regs[i])
3232 fail = 1;
3236 /* For TEST, we're interested in a range of insns, not a whole block.
3237 Moreover, we're interested in the insns live from OTHER_BB. */
3239 COPY_REG_SET (test_live, other_bb->global_live_at_start);
3240 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
3243 for (insn = jump; ; insn = prev)
3245 prev = propagate_one_insn (pbi, insn);
3246 if (insn == earliest)
3247 break;
3250 free_propagate_block_info (pbi);
3252 /* We can perform the transformation if
3253 MERGE_SET & (TEST_SET | TEST_LIVE)
3254 and
3255 TEST_SET & merge_bb->global_live_at_start
3256 are empty. */
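	/* For instance, if MERGE_BB contains "x = a" while the comparison
	   feeding the branch reads X, then X appears in both MERGE_SET and
	   TEST_LIVE, the intersection is non-empty, and the transformation
	   is rejected; hoisting "x = a" above the comparison would change
	   the branch condition.  */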
3258 if (bitmap_intersect_p (test_set, merge_set)
3259 || bitmap_intersect_p (test_live, merge_set)
3260 || bitmap_intersect_p (test_set, merge_bb->global_live_at_start))
3261 fail = 1;
3263 FREE_REG_SET (tmp);
3264 FREE_REG_SET (merge_set);
3265 FREE_REG_SET (test_live);
3266 FREE_REG_SET (test_set);
3268 if (fail)
3269 return FALSE;
3272 no_body:
3273 /* We don't want to use normal invert_jump or redirect_jump because
3274 we don't want delete_insn to be called. Also, we want to do our own
3275 change group management. */
3277 old_dest = JUMP_LABEL (jump);
3278 if (other_bb != new_dest)
3280 new_label = block_label (new_dest);
3281 if (reversep
3282 ? ! invert_jump_1 (jump, new_label)
3283 : ! redirect_jump_1 (jump, new_label))
3284 goto cancel;
3287 if (! apply_change_group ())
3288 return FALSE;
3290 if (other_bb != new_dest)
3292 if (old_dest)
3293 LABEL_NUSES (old_dest) -= 1;
3294 if (new_label)
3295 LABEL_NUSES (new_label) += 1;
3296 JUMP_LABEL (jump) = new_label;
3297 if (reversep)
3298 invert_br_probabilities (jump);
3300 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
3301 if (reversep)
3303 gcov_type count, probability;
3304 count = BRANCH_EDGE (test_bb)->count;
3305 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
3306 FALLTHRU_EDGE (test_bb)->count = count;
3307 probability = BRANCH_EDGE (test_bb)->probability;
3308 BRANCH_EDGE (test_bb)->probability
3309 = FALLTHRU_EDGE (test_bb)->probability;
3310 FALLTHRU_EDGE (test_bb)->probability = probability;
3311 update_br_prob_note (test_bb);
3315 /* Move the insns out of MERGE_BB to before the branch. */
3316 if (head != NULL)
3318 if (end == BB_END (merge_bb))
3319 BB_END (merge_bb) = PREV_INSN (head);
3321 if (squeeze_notes (&head, &end))
3322 return TRUE;
3324 reorder_insns (head, end, PREV_INSN (earliest));
3327 /* Remove the jump and edge if we can. */
3328 if (other_bb == new_dest)
3330 delete_insn (jump);
3331 remove_edge (BRANCH_EDGE (test_bb));
3332 /* ??? Can't merge blocks here, as then_bb is still in use.
3333 At minimum, the merge will get done just before bb-reorder. */
3336 return TRUE;
3338 cancel:
3339 cancel_changes (0);
3340 return FALSE;
3343 /* Main entry point for all if-conversion. */
3345 void
3346 if_convert (int x_life_data_ok)
3348 basic_block bb;
3349 int pass;
3351 num_possible_if_blocks = 0;
3352 num_updated_if_blocks = 0;
3353 num_true_changes = 0;
3354 life_data_ok = (x_life_data_ok != 0);
3356 if ((! targetm.cannot_modify_jumps_p ())
3357 && (!flag_reorder_blocks_and_partition || !no_new_pseudos
3358 || !targetm.have_named_sections))
3359 mark_loop_exit_edges ();
3361 /* Compute postdominators if we think we'll use them. */
3362 if (HAVE_conditional_execution || life_data_ok)
3363 calculate_dominance_info (CDI_POST_DOMINATORS);
3365 if (life_data_ok)
3366 clear_bb_flags ();
3368 /* Go through each of the basic blocks looking for things to convert. If we
3369 have conditional execution, we make multiple passes to allow us to handle
3370 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
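  /* For illustration, nested source such as

	if (a)
	  {
	    if (b)
	      x = 1;
	  }

     is handled inside out: one pass converts the inner IF, after which the
     outer construct becomes a simple IF-THEN block that a later pass can
     convert in turn.  */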
3371 pass = 0;
3374 cond_exec_changed_p = FALSE;
3375 pass++;
3377 #ifdef IFCVT_MULTIPLE_DUMPS
3378 if (dump_file && pass > 1)
3379 fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
3380 #endif
3382 FOR_EACH_BB (bb)
3384 basic_block new_bb;
3385 while ((new_bb = find_if_header (bb, pass)))
3386 bb = new_bb;
3389 #ifdef IFCVT_MULTIPLE_DUMPS
3390 if (dump_file && cond_exec_changed_p)
3391 print_rtl_with_bb (dump_file, get_insns ());
3392 #endif
3394 while (cond_exec_changed_p);
3396 #ifdef IFCVT_MULTIPLE_DUMPS
3397 if (dump_file)
3398 fprintf (dump_file, "\n\n========== no more changes\n");
3399 #endif
3401 free_dominance_info (CDI_POST_DOMINATORS);
3403 if (dump_file)
3404 fflush (dump_file);
3406 clear_aux_for_blocks ();
3408 /* Rebuild life info for basic blocks that require it. */
3409 if (num_true_changes && life_data_ok)
3411 /* If we allocated new pseudos, we must resize the array for sched1. */
3412 if (max_regno < max_reg_num ())
3414 max_regno = max_reg_num ();
3415 allocate_reg_info (max_regno, FALSE, FALSE);
3417 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3418 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
3419 | PROP_KILL_DEAD_CODE);
3422 /* Write the final stats. */
3423 if (dump_file && num_possible_if_blocks > 0)
3425 fprintf (dump_file,
3426 "\n%d possible IF blocks searched.\n",
3427 num_possible_if_blocks);
3428 fprintf (dump_file,
3429 "%d IF blocks converted.\n",
3430 num_updated_if_blocks);
3431 fprintf (dump_file,
3432 "%d true changes made.\n\n\n",
3433 num_true_changes);
3436 #ifdef ENABLE_CHECKING
3437 verify_flow_info ();
3438 #endif