1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "function.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "except.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "expr.h"
36 #include "real.h"
37 #include "output.h"
38 #include "optabs.h"
39 #include "toplev.h"
40 #include "tm_p.h"
41 #include "cfgloop.h"
42 #include "target.h"
45 #ifndef HAVE_conditional_execution
46 #define HAVE_conditional_execution 0
47 #endif
48 #ifndef HAVE_conditional_move
49 #define HAVE_conditional_move 0
50 #endif
51 #ifndef HAVE_incscc
52 #define HAVE_incscc 0
53 #endif
54 #ifndef HAVE_decscc
55 #define HAVE_decscc 0
56 #endif
57 #ifndef HAVE_trap
58 #define HAVE_trap 0
59 #endif
60 #ifndef HAVE_conditional_trap
61 #define HAVE_conditional_trap 0
62 #endif
64 #ifndef MAX_CONDITIONAL_EXECUTE
65 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
66 #endif
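/* Illustrative note (added, not from the original source): with a
   BRANCH_COST of 2 this evaluates to 3, so a lone THEN block of up to
   three insns may be predicated; cond_exec_process_if_block doubles the
   limit below when an ELSE block is present.  */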
68 #define NULL_EDGE ((struct edge_def *)NULL)
69 #define NULL_BLOCK ((struct basic_block_def *)NULL)
71 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
72 static int num_possible_if_blocks;
74 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
75 execution. */
76 static int num_updated_if_blocks;
78 /* # of changes made which require life information to be updated. */
79 static int num_true_changes;
81 /* Whether conditional execution changes were made. */
82 static int cond_exec_changed_p;
84 /* True if life data ok at present. */
85 static bool life_data_ok;
87 /* Forward references. */
88 static int count_bb_insns (basic_block);
89 static int total_bb_rtx_cost (basic_block);
90 static rtx first_active_insn (basic_block);
91 static rtx last_active_insn (basic_block, int);
92 static basic_block block_fallthru (basic_block);
93 static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
94 static rtx cond_exec_get_condition (rtx);
95 static int cond_exec_process_if_block (ce_if_block_t *, int);
96 static rtx noce_get_condition (rtx, rtx *);
97 static int noce_operand_ok (rtx);
98 static int noce_process_if_block (ce_if_block_t *);
99 static int process_if_block (ce_if_block_t *);
100 static void merge_if_block (ce_if_block_t *);
101 static int find_cond_trap (basic_block, edge, edge);
102 static basic_block find_if_header (basic_block, int);
103 static int block_jumps_and_fallthru_p (basic_block, basic_block);
104 static int find_if_block (ce_if_block_t *);
105 static int find_if_case_1 (basic_block, edge, edge);
106 static int find_if_case_2 (basic_block, edge, edge);
107 static int find_memory (rtx *, void *);
108 static int dead_or_predicable (basic_block, basic_block, basic_block,
109 basic_block, int);
110 static void noce_emit_move_insn (rtx, rtx);
111 static rtx block_has_only_trap (basic_block);
112 static void mark_loop_exit_edges (void);
114 /* Sets EDGE_LOOP_EXIT flag for all loop exits. */
115 static void
116 mark_loop_exit_edges (void)
118 struct loops loops;
119 basic_block bb;
120 edge e;
122 flow_loops_find (&loops, LOOP_TREE);
123 free_dominance_info (CDI_DOMINATORS);
125 if (loops.num > 1)
127 FOR_EACH_BB (bb)
129 for (e = bb->succ; e; e = e->succ_next)
131 if (find_common_loop (bb->loop_father, e->dest->loop_father)
132 != bb->loop_father)
133 e->flags |= EDGE_LOOP_EXIT;
134 else
135 e->flags &= ~EDGE_LOOP_EXIT;
140 flow_loops_free (&loops);
143 /* Count the number of non-jump active insns in BB. */
145 static int
146 count_bb_insns (basic_block bb)
148 int count = 0;
149 rtx insn = BB_HEAD (bb);
151 while (1)
153 if (CALL_P (insn) || NONJUMP_INSN_P (insn))
154 count++;
156 if (insn == BB_END (bb))
157 break;
158 insn = NEXT_INSN (insn);
161 return count;
164 /* Count the total rtx_cost of non-jump active insns in BB. */
166 static int
167 total_bb_rtx_cost (basic_block bb)
169 int count = 0;
170 rtx insn = BB_HEAD (bb);
172 while (1)
174 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
175 count += rtx_cost (PATTERN (insn), 0);
177 if (insn == BB_END (bb))
178 break;
179 insn = NEXT_INSN (insn);
182 return count;
185 /* Return the first non-jump active insn in the basic block. */
187 static rtx
188 first_active_insn (basic_block bb)
190 rtx insn = BB_HEAD (bb);
192 if (LABEL_P (insn))
194 if (insn == BB_END (bb))
195 return NULL_RTX;
196 insn = NEXT_INSN (insn);
199 while (NOTE_P (insn))
201 if (insn == BB_END (bb))
202 return NULL_RTX;
203 insn = NEXT_INSN (insn);
206 if (JUMP_P (insn))
207 return NULL_RTX;
209 return insn;
212 /* Return the last active (non-jump) insn in the basic block. */
214 static rtx
215 last_active_insn (basic_block bb, int skip_use_p)
217 rtx insn = BB_END (bb);
218 rtx head = BB_HEAD (bb);
220 while (NOTE_P (insn)
221 || JUMP_P (insn)
222 || (skip_use_p
223 && NONJUMP_INSN_P (insn)
224 && GET_CODE (PATTERN (insn)) == USE))
226 if (insn == head)
227 return NULL_RTX;
228 insn = PREV_INSN (insn);
231 if (LABEL_P (insn))
232 return NULL_RTX;
234 return insn;
237 /* Return the basic block reached by falling through the basic block BB. */
239 static basic_block
240 block_fallthru (basic_block bb)
242 edge e;
244 for (e = bb->succ;
245 e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
246 e = e->succ_next)
249 return (e) ? e->dest : NULL_BLOCK;
252 /* Go through a bunch of insns, converting them to conditional
253 execution format if possible. Return TRUE if all of the non-note
254 insns were processed. */
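/* A rough sketch of the predicated form produced here (register numbers
   and modes are invented for illustration, not taken from any target):

     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 1)))

   becomes, when predicated on the test (ne (reg:CC 17) (const_int 0)),

     (cond_exec (ne (reg:CC 17) (const_int 0))
                (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 1))))

   If an insn is already a COND_EXEC, the new test is ANDed with the
   existing one, as handled below.  */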
256 static int
257 cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
258 /* if block information */rtx start,
259 /* first insn to look at */rtx end,
260 /* last insn to look at */rtx test,
261 /* conditional execution test */rtx prob_val,
262 /* probability of branch taken. */int mod_ok)
264 int must_be_last = FALSE;
265 rtx insn;
266 rtx xtest;
267 rtx pattern;
269 if (!start || !end)
270 return FALSE;
272 for (insn = start; ; insn = NEXT_INSN (insn))
274 if (NOTE_P (insn))
275 goto insn_done;
277 if (!NONJUMP_INSN_P (insn) && !CALL_P (insn))
278 abort ();
280 /* Remove USE insns that get in the way. */
281 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
283 /* ??? Ug. Actually unlinking the thing is problematic,
284 given what we'd have to coordinate with our callers. */
285 SET_INSN_DELETED (insn);
286 goto insn_done;
289 /* Last insn wasn't last? */
290 if (must_be_last)
291 return FALSE;
293 if (modified_in_p (test, insn))
295 if (!mod_ok)
296 return FALSE;
297 must_be_last = TRUE;
300 /* Now build the conditional form of the instruction. */
301 pattern = PATTERN (insn);
302 xtest = copy_rtx (test);
304 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
305 two conditions. */
306 if (GET_CODE (pattern) == COND_EXEC)
308 if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
309 return FALSE;
311 xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
312 COND_EXEC_TEST (pattern));
313 pattern = COND_EXEC_CODE (pattern);
316 pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
318 /* If the machine needs to modify the insn being conditionally executed,
319 say for example to force a constant integer operand into a temp
320 register, do so here. */
321 #ifdef IFCVT_MODIFY_INSN
322 IFCVT_MODIFY_INSN (ce_info, pattern, insn);
323 if (! pattern)
324 return FALSE;
325 #endif
327 validate_change (insn, &PATTERN (insn), pattern, 1);
329 if (CALL_P (insn) && prob_val)
330 validate_change (insn, &REG_NOTES (insn),
331 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
332 REG_NOTES (insn)), 1);
334 insn_done:
335 if (insn == end)
336 break;
339 return TRUE;
342 /* Return the condition for a jump. Do not do any special processing. */
344 static rtx
345 cond_exec_get_condition (rtx jump)
347 rtx test_if, cond;
349 if (any_condjump_p (jump))
350 test_if = SET_SRC (pc_set (jump));
351 else
352 return NULL_RTX;
353 cond = XEXP (test_if, 0);
355 /* If this branches to JUMP_LABEL when the condition is false,
356 reverse the condition. */
357 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
358 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
360 enum rtx_code rev = reversed_comparison_code (cond, jump);
361 if (rev == UNKNOWN)
362 return NULL_RTX;
364 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
365 XEXP (cond, 1));
368 return cond;
371 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
372 to conditional execution. Return TRUE if we were successful at
373 converting the block. */
375 static int
376 cond_exec_process_if_block (ce_if_block_t * ce_info,
377 /* if block information */int do_multiple_p)
379 basic_block test_bb = ce_info->test_bb; /* last test block */
380 basic_block then_bb = ce_info->then_bb; /* THEN */
381 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
382 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
383 rtx then_start; /* first insn in THEN block */
384 rtx then_end; /* last insn + 1 in THEN block */
385 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
386 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
387 int max; /* max # of insns to convert. */
388 int then_mod_ok; /* whether conditional mods are ok in THEN */
389 rtx true_expr; /* test for else block insns */
390 rtx false_expr; /* test for then block insns */
391 rtx true_prob_val; /* probability of else block */
392 rtx false_prob_val; /* probability of then block */
393 int n_insns;
394 enum rtx_code false_code;
396 /* If test is comprised of && or || elements, and we've failed at handling
397 all of them together, just use the last test if it is the special case of
398 && elements without an ELSE block. */
399 if (!do_multiple_p && ce_info->num_multiple_test_blocks)
401 if (else_bb || ! ce_info->and_and_p)
402 return FALSE;
404 ce_info->test_bb = test_bb = ce_info->last_test_bb;
405 ce_info->num_multiple_test_blocks = 0;
406 ce_info->num_and_and_blocks = 0;
407 ce_info->num_or_or_blocks = 0;
410 /* Find the conditional jump to the ELSE or JOIN part, and isolate
411 the test. */
412 test_expr = cond_exec_get_condition (BB_END (test_bb));
413 if (! test_expr)
414 return FALSE;
416 /* If the conditional jump is more than just a conditional jump,
417 then we can not do conditional execution conversion on this block. */
418 if (! onlyjump_p (BB_END (test_bb)))
419 return FALSE;
421 /* Collect the bounds of where we're to search, skipping any labels, jumps
422 and notes at the beginning and end of the block. Then count the total
423 number of insns and see if it is small enough to convert. */
424 then_start = first_active_insn (then_bb);
425 then_end = last_active_insn (then_bb, TRUE);
426 n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
427 max = MAX_CONDITIONAL_EXECUTE;
429 if (else_bb)
431 max *= 2;
432 else_start = first_active_insn (else_bb);
433 else_end = last_active_insn (else_bb, TRUE);
434 n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
437 if (n_insns > max)
438 return FALSE;
440 /* Map test_expr/test_jump into the appropriate MD tests to use on
441 the conditionally executed code. */
443 true_expr = test_expr;
445 false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
446 if (false_code != UNKNOWN)
447 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
448 XEXP (true_expr, 0), XEXP (true_expr, 1));
449 else
450 false_expr = NULL_RTX;
452 #ifdef IFCVT_MODIFY_TESTS
453 /* If the machine description needs to modify the tests, such as setting a
454 conditional execution register from a comparison, it can do so here. */
455 IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
457 /* See if the conversion failed. */
458 if (!true_expr || !false_expr)
459 goto fail;
460 #endif
462 true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
463 if (true_prob_val)
465 true_prob_val = XEXP (true_prob_val, 0);
466 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
468 else
469 false_prob_val = NULL_RTX;
471 /* If we have && or || tests, do them here. These tests are in the adjacent
472 blocks after the first block containing the test. */
473 if (ce_info->num_multiple_test_blocks > 0)
475 basic_block bb = test_bb;
476 basic_block last_test_bb = ce_info->last_test_bb;
478 if (! false_expr)
479 goto fail;
483 rtx start, end;
484 rtx t, f;
486 bb = block_fallthru (bb);
487 start = first_active_insn (bb);
488 end = last_active_insn (bb, TRUE);
489 if (start
490 && ! cond_exec_process_insns (ce_info, start, end, false_expr,
491 false_prob_val, FALSE))
492 goto fail;
494 /* If the conditional jump is more than just a conditional jump, then
495 we can not do conditional execution conversion on this block. */
496 if (! onlyjump_p (BB_END (bb)))
497 goto fail;
499 /* Find the conditional jump and isolate the test. */
500 t = cond_exec_get_condition (BB_END (bb));
501 if (! t)
502 goto fail;
504 f = gen_rtx_fmt_ee (reverse_condition (GET_CODE (t)),
505 GET_MODE (t),
506 XEXP (t, 0),
507 XEXP (t, 1));
509 if (ce_info->and_and_p)
511 t = gen_rtx_AND (GET_MODE (t), true_expr, t);
512 f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
514 else
516 t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
517 f = gen_rtx_AND (GET_MODE (t), false_expr, f);
520 /* If the machine description needs to modify the tests, such as
521 setting a conditional execution register from a comparison, it can
522 do so here. */
523 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
524 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
526 /* See if the conversion failed. */
527 if (!t || !f)
528 goto fail;
529 #endif
531 true_expr = t;
532 false_expr = f;
534 while (bb != last_test_bb);
537 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
538 on the THEN block. */
539 then_mod_ok = (else_bb == NULL_BLOCK);
541 /* Go through the THEN and ELSE blocks converting the insns if possible
542 to conditional execution. */
544 if (then_end
545 && (! false_expr
546 || ! cond_exec_process_insns (ce_info, then_start, then_end,
547 false_expr, false_prob_val,
548 then_mod_ok)))
549 goto fail;
551 if (else_bb && else_end
552 && ! cond_exec_process_insns (ce_info, else_start, else_end,
553 true_expr, true_prob_val, TRUE))
554 goto fail;
556 /* If we cannot apply the changes, fail. Do not go through the normal fail
557 processing, since apply_change_group will call cancel_changes. */
558 if (! apply_change_group ())
560 #ifdef IFCVT_MODIFY_CANCEL
561 /* Cancel any machine dependent changes. */
562 IFCVT_MODIFY_CANCEL (ce_info);
563 #endif
564 return FALSE;
567 #ifdef IFCVT_MODIFY_FINAL
568 /* Do any machine dependent final modifications. */
569 IFCVT_MODIFY_FINAL (ce_info);
570 #endif
572 /* Conversion succeeded. */
573 if (dump_file)
574 fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
575 n_insns, (n_insns == 1) ? " was" : "s were");
577 /* Merge the blocks! */
578 merge_if_block (ce_info);
579 cond_exec_changed_p = TRUE;
580 return TRUE;
582 fail:
583 #ifdef IFCVT_MODIFY_CANCEL
584 /* Cancel any machine dependent changes. */
585 IFCVT_MODIFY_CANCEL (ce_info);
586 #endif
588 cancel_changes (0);
589 return FALSE;
592 /* Used by noce_process_if_block to communicate with its subroutines.
594 The subroutines know that A and B may be evaluated freely. They
595 know that X is a register. They should insert new instructions
596 before cond_earliest. */
598 struct noce_if_info
600 basic_block test_bb;
601 rtx insn_a, insn_b;
602 rtx x, a, b;
603 rtx jump, cond, cond_earliest;
604 /* True if "b" was originally evaluated unconditionally. */
605 bool b_unconditional;
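/* Illustrative mapping (the names and registers are hypothetical): for a
   source fragment such as

     if (a > b) x = a; else x = b;

   noce_process_if_block fills in JUMP with the conditional jump ending
   TEST_BB, COND with the (possibly reversed) comparison, INSN_A with the
   THEN-block set "x = a" (A being its SET_SRC), INSN_B and B with the
   corresponding ELSE-block set, and X with the common destination.  */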
608 static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
609 static int noce_try_move (struct noce_if_info *);
610 static int noce_try_store_flag (struct noce_if_info *);
611 static int noce_try_addcc (struct noce_if_info *);
612 static int noce_try_store_flag_constants (struct noce_if_info *);
613 static int noce_try_store_flag_mask (struct noce_if_info *);
614 static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
615 rtx, rtx, rtx);
616 static int noce_try_cmove (struct noce_if_info *);
617 static int noce_try_cmove_arith (struct noce_if_info *);
618 static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
619 static int noce_try_minmax (struct noce_if_info *);
620 static int noce_try_abs (struct noce_if_info *);
621 static int noce_try_sign_mask (struct noce_if_info *);
623 /* Helper function for noce_try_store_flag*. */
625 static rtx
626 noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
627 int normalize)
629 rtx cond = if_info->cond;
630 int cond_complex;
631 enum rtx_code code;
633 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
634 || ! general_operand (XEXP (cond, 1), VOIDmode));
636 /* If earliest == jump, or when the condition is complex, try to
637 build the store_flag insn directly. */
639 if (cond_complex)
640 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
642 if (reversep)
643 code = reversed_comparison_code (cond, if_info->jump);
644 else
645 code = GET_CODE (cond);
647 if ((if_info->cond_earliest == if_info->jump || cond_complex)
648 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
650 rtx tmp;
652 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
653 XEXP (cond, 1));
654 tmp = gen_rtx_SET (VOIDmode, x, tmp);
656 start_sequence ();
657 tmp = emit_insn (tmp);
659 if (recog_memoized (tmp) >= 0)
661 tmp = get_insns ();
662 end_sequence ();
663 emit_insn (tmp);
665 if_info->cond_earliest = if_info->jump;
667 return x;
670 end_sequence ();
673 /* Don't even try if the comparison operands or the mode of X are weird. */
674 if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
675 return NULL_RTX;
677 return emit_store_flag (x, code, XEXP (cond, 0),
678 XEXP (cond, 1), VOIDmode,
679 (code == LTU || code == LEU
680 || code == GEU || code == GTU), normalize);
683 /* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
684 X is the destination/target and Y is the value to copy. */
686 static void
687 noce_emit_move_insn (rtx x, rtx y)
689 enum machine_mode outmode, inmode;
690 rtx outer, inner;
691 int bitpos;
693 if (GET_CODE (x) != STRICT_LOW_PART)
695 emit_move_insn (x, y);
696 return;
699 outer = XEXP (x, 0);
700 inner = XEXP (outer, 0);
701 outmode = GET_MODE (outer);
702 inmode = GET_MODE (inner);
703 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
704 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
705 GET_MODE_BITSIZE (inmode));
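/* Illustrative example (hypothetical registers): given
     X = (strict_low_part (subreg:HI (reg:SI 100) 0))
   the code above uses store_bit_field to write Y into only the 16 bits of
   (reg:SI 100) selected by the subreg, leaving the remaining bits of the
   register untouched, rather than emitting a whole-register move.  */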
708 /* Return sequence of instructions generated by if conversion. This
709 function calls end_sequence() to end the current stream, ensures
710 that all instructions are unshared and recognizable non-jump insns.
711 On failure, this function returns a NULL_RTX. */
713 static rtx
714 end_ifcvt_sequence (struct noce_if_info *if_info)
716 rtx insn;
717 rtx seq = get_insns ();
719 set_used_flags (if_info->x);
720 set_used_flags (if_info->cond);
721 unshare_all_rtl_in_chain (seq);
722 end_sequence ();
724 /* Make sure that all of the instructions emitted are recognizable,
725 and that we haven't introduced a new jump instruction.
726 As an exercise for the reader, build a general mechanism that
727 allows proper placement of required clobbers. */
728 for (insn = seq; insn; insn = NEXT_INSN (insn))
729 if (JUMP_P (insn)
730 || recog_memoized (insn) == -1)
731 return NULL_RTX;
733 return seq;
736 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
737 "if (a == b) x = a; else x = b" into "x = b". */
739 static int
740 noce_try_move (struct noce_if_info *if_info)
742 rtx cond = if_info->cond;
743 enum rtx_code code = GET_CODE (cond);
744 rtx y, seq;
746 if (code != NE && code != EQ)
747 return FALSE;
749 /* This optimization isn't valid if either A or B could be a NaN
750 or a signed zero. */
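/* For example, under IEEE arithmetic +0.0 == -0.0 is true even though the
   two values are distinguishable, so collapsing "if (a == b) x = a; else
   x = b" into a single move could change the sign of a zero stored in X;
   NaN operands raise similar concerns, hence both cases are rejected.
   (Illustrative note, not from the original source.)  */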
751 if (HONOR_NANS (GET_MODE (if_info->x))
752 || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
753 return FALSE;
755 /* Check whether the operands of the comparison are A and B in
756 either order. */
757 if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
758 && rtx_equal_p (if_info->b, XEXP (cond, 1)))
759 || (rtx_equal_p (if_info->a, XEXP (cond, 1))
760 && rtx_equal_p (if_info->b, XEXP (cond, 0))))
762 y = (code == EQ) ? if_info->a : if_info->b;
764 /* Avoid generating the move if the source is the destination. */
765 if (! rtx_equal_p (if_info->x, y))
767 start_sequence ();
768 noce_emit_move_insn (if_info->x, y);
769 seq = end_ifcvt_sequence (if_info);
770 if (!seq)
771 return FALSE;
773 emit_insn_before_setloc (seq, if_info->jump,
774 INSN_LOCATOR (if_info->insn_a));
776 return TRUE;
778 return FALSE;
781 /* Convert "if (test) x = 1; else x = 0".
783 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
784 tried in noce_try_store_flag_constants after noce_try_cmove has had
785 a go at the conversion. */
787 static int
788 noce_try_store_flag (struct noce_if_info *if_info)
790 int reversep;
791 rtx target, seq;
793 if (GET_CODE (if_info->b) == CONST_INT
794 && INTVAL (if_info->b) == STORE_FLAG_VALUE
795 && if_info->a == const0_rtx)
796 reversep = 0;
797 else if (if_info->b == const0_rtx
798 && GET_CODE (if_info->a) == CONST_INT
799 && INTVAL (if_info->a) == STORE_FLAG_VALUE
800 && (reversed_comparison_code (if_info->cond, if_info->jump)
801 != UNKNOWN))
802 reversep = 1;
803 else
804 return FALSE;
806 start_sequence ();
808 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
809 if (target)
811 if (target != if_info->x)
812 noce_emit_move_insn (if_info->x, target);
814 seq = end_ifcvt_sequence (if_info);
815 if (! seq)
816 return FALSE;
818 emit_insn_before_setloc (seq, if_info->jump,
819 INSN_LOCATOR (if_info->insn_a));
820 return TRUE;
822 else
824 end_sequence ();
825 return FALSE;
829 /* Convert "if (test) x = a; else x = b", for A and B constant. */
831 static int
832 noce_try_store_flag_constants (struct noce_if_info *if_info)
834 rtx target, seq;
835 int reversep;
836 HOST_WIDE_INT itrue, ifalse, diff, tmp;
837 int normalize, can_reverse;
838 enum machine_mode mode;
840 if (! no_new_pseudos
841 && GET_CODE (if_info->a) == CONST_INT
842 && GET_CODE (if_info->b) == CONST_INT)
844 mode = GET_MODE (if_info->x);
845 ifalse = INTVAL (if_info->a);
846 itrue = INTVAL (if_info->b);
848 /* Make sure we can represent the difference between the two values. */
849 if ((itrue - ifalse > 0)
850 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
851 return FALSE;
853 diff = trunc_int_for_mode (itrue - ifalse, mode);
855 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
856 != UNKNOWN);
858 reversep = 0;
859 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
860 normalize = 0;
861 else if (ifalse == 0 && exact_log2 (itrue) >= 0
862 && (STORE_FLAG_VALUE == 1
863 || BRANCH_COST >= 2))
864 normalize = 1;
865 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
866 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
867 normalize = 1, reversep = 1;
868 else if (itrue == -1
869 && (STORE_FLAG_VALUE == -1
870 || BRANCH_COST >= 2))
871 normalize = -1;
872 else if (ifalse == -1 && can_reverse
873 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
874 normalize = -1, reversep = 1;
875 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
876 || BRANCH_COST >= 3)
877 normalize = -1;
878 else
879 return FALSE;
881 if (reversep)
883 tmp = itrue; itrue = ifalse; ifalse = tmp;
884 diff = trunc_int_for_mode (-diff, mode);
887 start_sequence ();
888 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
889 if (! target)
891 end_sequence ();
892 return FALSE;
895 /* if (test) x = 3; else x = 4;
896 => x = 3 + (test == 0); */
897 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
899 target = expand_simple_binop (mode,
900 (diff == STORE_FLAG_VALUE
901 ? PLUS : MINUS),
902 GEN_INT (ifalse), target, if_info->x, 0,
903 OPTAB_WIDEN);
906 /* if (test) x = 8; else x = 0;
907 => x = (test != 0) << 3; */
908 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
910 target = expand_simple_binop (mode, ASHIFT,
911 target, GEN_INT (tmp), if_info->x, 0,
912 OPTAB_WIDEN);
915 /* if (test) x = -1; else x = b;
916 => x = -(test != 0) | b; */
917 else if (itrue == -1)
919 target = expand_simple_binop (mode, IOR,
920 target, GEN_INT (ifalse), if_info->x, 0,
921 OPTAB_WIDEN);
924 /* if (test) x = a; else x = b;
925 => x = (-(test != 0) & (b - a)) + a; */
926 else
928 target = expand_simple_binop (mode, AND,
929 target, GEN_INT (diff), if_info->x, 0,
930 OPTAB_WIDEN);
931 if (target)
932 target = expand_simple_binop (mode, PLUS,
933 target, GEN_INT (ifalse),
934 if_info->x, 0, OPTAB_WIDEN);
937 if (! target)
939 end_sequence ();
940 return FALSE;
943 if (target != if_info->x)
944 noce_emit_move_insn (if_info->x, target);
946 seq = end_ifcvt_sequence (if_info);
947 if (!seq)
948 return FALSE;
950 emit_insn_before_setloc (seq, if_info->jump,
951 INSN_LOCATOR (if_info->insn_a));
952 return TRUE;
955 return FALSE;
958 /* Convert "if (test) foo++" into "foo += (test != 0)", and
959 similarly for "foo--". */
961 static int
962 noce_try_addcc (struct noce_if_info *if_info)
964 rtx target, seq;
965 int subtract, normalize;
967 if (! no_new_pseudos
968 && GET_CODE (if_info->a) == PLUS
969 && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
970 && (reversed_comparison_code (if_info->cond, if_info->jump)
971 != UNKNOWN))
973 rtx cond = if_info->cond;
974 enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
976 /* First try to use addcc pattern. */
977 if (general_operand (XEXP (cond, 0), VOIDmode)
978 && general_operand (XEXP (cond, 1), VOIDmode))
980 start_sequence ();
981 target = emit_conditional_add (if_info->x, code,
982 XEXP (cond, 0),
983 XEXP (cond, 1),
984 VOIDmode,
985 if_info->b,
986 XEXP (if_info->a, 1),
987 GET_MODE (if_info->x),
988 (code == LTU || code == GEU
989 || code == LEU || code == GTU));
990 if (target)
992 if (target != if_info->x)
993 noce_emit_move_insn (if_info->x, target);
995 seq = end_ifcvt_sequence (if_info);
996 if (!seq)
997 return FALSE;
999 emit_insn_before_setloc (seq, if_info->jump,
1000 INSN_LOCATOR (if_info->insn_a));
1001 return TRUE;
1003 end_sequence ();
1006 /* If that fails, construct conditional increment or decrement using
1007 setcc. */
1008 if (BRANCH_COST >= 2
1009 && (XEXP (if_info->a, 1) == const1_rtx
1010 || XEXP (if_info->a, 1) == constm1_rtx))
1012 start_sequence ();
1013 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1014 subtract = 0, normalize = 0;
1015 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1016 subtract = 1, normalize = 0;
1017 else
1018 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
1021 target = noce_emit_store_flag (if_info,
1022 gen_reg_rtx (GET_MODE (if_info->x)),
1023 1, normalize);
1025 if (target)
1026 target = expand_simple_binop (GET_MODE (if_info->x),
1027 subtract ? MINUS : PLUS,
1028 if_info->b, target, if_info->x,
1029 0, OPTAB_WIDEN);
1030 if (target)
1032 if (target != if_info->x)
1033 noce_emit_move_insn (if_info->x, target);
1035 seq = end_ifcvt_sequence (if_info);
1036 if (!seq)
1037 return FALSE;
1039 emit_insn_before_setloc (seq, if_info->jump,
1040 INSN_LOCATOR (if_info->insn_a));
1041 return TRUE;
1043 end_sequence ();
1047 return FALSE;
1050 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
1052 static int
1053 noce_try_store_flag_mask (struct noce_if_info *if_info)
1055 rtx target, seq;
1056 int reversep;
1058 reversep = 0;
1059 if (! no_new_pseudos
1060 && (BRANCH_COST >= 2
1061 || STORE_FLAG_VALUE == -1)
1062 && ((if_info->a == const0_rtx
1063 && rtx_equal_p (if_info->b, if_info->x))
1064 || ((reversep = (reversed_comparison_code (if_info->cond,
1065 if_info->jump)
1066 != UNKNOWN))
1067 && if_info->b == const0_rtx
1068 && rtx_equal_p (if_info->a, if_info->x))))
1070 start_sequence ();
1071 target = noce_emit_store_flag (if_info,
1072 gen_reg_rtx (GET_MODE (if_info->x)),
1073 reversep, -1);
1074 if (target)
1075 target = expand_simple_binop (GET_MODE (if_info->x), AND,
1076 if_info->x,
1077 target, if_info->x, 0,
1078 OPTAB_WIDEN);
1080 if (target)
1082 if (target != if_info->x)
1083 noce_emit_move_insn (if_info->x, target);
1085 seq = end_ifcvt_sequence (if_info);
1086 if (!seq)
1087 return FALSE;
1089 emit_insn_before_setloc (seq, if_info->jump,
1090 INSN_LOCATOR (if_info->insn_a));
1091 return TRUE;
1094 end_sequence ();
1097 return FALSE;
1100 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
1102 static rtx
1103 noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
1104 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
1106 /* If earliest == jump, try to build the cmove insn directly.
1107 This is helpful when combine has created some complex condition
1108 (like for alpha's cmovlbs) that we can't hope to regenerate
1109 through the normal interface. */
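/* For illustration (hypothetical registers and modes), the insn built
   directly below has the shape

     (set (reg:SI 100)
          (if_then_else:SI (lt (reg:CC 17) (const_int 0))
                           (reg:SI 101)    ;; vtrue
                           (reg:SI 102)))  ;; vfalse

   and is kept only if recog_memoized accepts it.  */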
1111 if (if_info->cond_earliest == if_info->jump)
1113 rtx tmp;
1115 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1116 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
1117 tmp = gen_rtx_SET (VOIDmode, x, tmp);
1119 start_sequence ();
1120 tmp = emit_insn (tmp);
1122 if (recog_memoized (tmp) >= 0)
1124 tmp = get_insns ();
1125 end_sequence ();
1126 emit_insn (tmp);
1128 return x;
1131 end_sequence ();
1134 /* Don't even try if the comparison operands are weird. */
1135 if (! general_operand (cmp_a, GET_MODE (cmp_a))
1136 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1137 return NULL_RTX;
1139 #if HAVE_conditional_move
1140 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1141 vtrue, vfalse, GET_MODE (x),
1142 (code == LTU || code == GEU
1143 || code == LEU || code == GTU));
1144 #else
1145 /* We'll never get here, as noce_process_if_block doesn't call the
1146 functions involved. Ifdef code, however, should be discouraged
1147 because it leads to typos in the code not selected. However,
1148 emit_conditional_move won't exist either. */
1149 return NULL_RTX;
1150 #endif
1153 /* Try only simple constants and registers here. More complex cases
1154 are handled in noce_try_cmove_arith after the noce_try_store_flag*
1155 routines have had a go at it. */
1157 static int
1158 noce_try_cmove (struct noce_if_info *if_info)
1160 enum rtx_code code;
1161 rtx target, seq;
1163 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1164 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1166 start_sequence ();
1168 code = GET_CODE (if_info->cond);
1169 target = noce_emit_cmove (if_info, if_info->x, code,
1170 XEXP (if_info->cond, 0),
1171 XEXP (if_info->cond, 1),
1172 if_info->a, if_info->b);
1174 if (target)
1176 if (target != if_info->x)
1177 noce_emit_move_insn (if_info->x, target);
1179 seq = end_ifcvt_sequence (if_info);
1180 if (!seq)
1181 return FALSE;
1183 emit_insn_before_setloc (seq, if_info->jump,
1184 INSN_LOCATOR (if_info->insn_a));
1185 return TRUE;
1187 else
1189 end_sequence ();
1190 return FALSE;
1194 return FALSE;
1197 /* Try more complex cases involving conditional_move. */
1199 static int
1200 noce_try_cmove_arith (struct noce_if_info *if_info)
1202 rtx a = if_info->a;
1203 rtx b = if_info->b;
1204 rtx x = if_info->x;
1205 rtx insn_a, insn_b;
1206 rtx tmp, target;
1207 int is_mem = 0;
1208 enum rtx_code code;
1210 /* A conditional move from two memory sources is equivalent to a
1211 conditional on their addresses followed by a load. Don't do this
1212 early because it'll screw alias analysis. Note that we've
1213 already checked for no side effects. */
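/* Sketch of the idea (the names below are illustrative only):

     if (test) x = *p; else x = *q;

   is rewritten as a conditional move on the addresses followed by a
   single unconditional load:

     tmp = test ? p : q;
     x = *tmp;
*/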
1214 if (! no_new_pseudos && cse_not_expected
1215 && MEM_P (a) && MEM_P (b)
1216 && BRANCH_COST >= 5)
1218 a = XEXP (a, 0);
1219 b = XEXP (b, 0);
1220 x = gen_reg_rtx (Pmode);
1221 is_mem = 1;
1224 /* ??? We could handle this if we knew that a load from A or B could
1225 not fault. This is also true if we've already loaded
1226 from the address along the path from ENTRY. */
1227 else if (may_trap_p (a) || may_trap_p (b))
1228 return FALSE;
1230 /* if (test) x = a + b; else x = c - d;
1231 => y = a + b;
1232 x = c - d;
1233 if (test)
1234 x = y;
1237 code = GET_CODE (if_info->cond);
1238 insn_a = if_info->insn_a;
1239 insn_b = if_info->insn_b;
1241 /* Possibly rearrange operands to make things come out more naturally. */
1242 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1244 int reversep = 0;
1245 if (rtx_equal_p (b, x))
1246 reversep = 1;
1247 else if (general_operand (b, GET_MODE (b)))
1248 reversep = 1;
1250 if (reversep)
1252 code = reversed_comparison_code (if_info->cond, if_info->jump);
1253 tmp = a, a = b, b = tmp;
1254 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1258 start_sequence ();
1260 /* If either operand is complex, load it into a register first.
1261 The best way to do this is to copy the original insn. In this
1262 way we preserve any clobbers etc that the insn may have had.
1263 This is of course not possible in the IS_MEM case. */
1264 if (! general_operand (a, GET_MODE (a)))
1266 rtx set;
1268 if (no_new_pseudos)
1269 goto end_seq_and_fail;
1271 if (is_mem)
1273 tmp = gen_reg_rtx (GET_MODE (a));
1274 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1276 else if (! insn_a)
1277 goto end_seq_and_fail;
1278 else
1280 a = gen_reg_rtx (GET_MODE (a));
1281 tmp = copy_rtx (insn_a);
1282 set = single_set (tmp);
1283 SET_DEST (set) = a;
1284 tmp = emit_insn (PATTERN (tmp));
1286 if (recog_memoized (tmp) < 0)
1287 goto end_seq_and_fail;
1289 if (! general_operand (b, GET_MODE (b)))
1291 rtx set;
1293 if (no_new_pseudos)
1294 goto end_seq_and_fail;
1296 if (is_mem)
1298 tmp = gen_reg_rtx (GET_MODE (b));
1299 tmp = emit_insn (gen_rtx_SET (VOIDmode,
1300 tmp,
1301 b));
1303 else if (! insn_b)
1304 goto end_seq_and_fail;
1305 else
1307 b = gen_reg_rtx (GET_MODE (b));
1308 tmp = copy_rtx (insn_b);
1309 set = single_set (tmp);
1310 SET_DEST (set) = b;
1311 tmp = emit_insn (PATTERN (tmp));
1313 if (recog_memoized (tmp) < 0)
1314 goto end_seq_and_fail;
1317 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1318 XEXP (if_info->cond, 1), a, b);
1320 if (! target)
1321 goto end_seq_and_fail;
1323 /* If we're handling a memory from above, emit the load now. */
1324 if (is_mem)
1326 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1328 /* Copy over flags as appropriate. */
1329 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1330 MEM_VOLATILE_P (tmp) = 1;
1331 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1332 MEM_IN_STRUCT_P (tmp) = 1;
1333 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1334 MEM_SCALAR_P (tmp) = 1;
1335 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1336 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1337 set_mem_align (tmp,
1338 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1340 noce_emit_move_insn (if_info->x, tmp);
1342 else if (target != x)
1343 noce_emit_move_insn (x, target);
1345 tmp = end_ifcvt_sequence (if_info);
1346 if (!tmp)
1347 return FALSE;
1349 emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1350 return TRUE;
1352 end_seq_and_fail:
1353 end_sequence ();
1354 return FALSE;
1357 /* For most cases, the simplified condition we found is the best
1358 choice, but this is not the case for the min/max/abs transforms.
1359 For these we wish to know that it is A or B in the condition. */
1361 static rtx
1362 noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
1363 rtx *earliest)
1365 rtx cond, set, insn;
1366 int reverse;
1368 /* If target is already mentioned in the known condition, return it. */
1369 if (reg_mentioned_p (target, if_info->cond))
1371 *earliest = if_info->cond_earliest;
1372 return if_info->cond;
1375 set = pc_set (if_info->jump);
1376 cond = XEXP (SET_SRC (set), 0);
1377 reverse
1378 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1379 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1381 /* If we're looking for a constant, try to make the conditional
1382 have that constant in it. There are two reasons why it may
1383 not have the constant we want:
1385 1. GCC may have needed to put the constant in a register, because
1386 the target can't compare directly against that constant. For
1387 this case, we look for a SET immediately before the comparison
1388 that puts a constant in that register.
1390 2. GCC may have canonicalized the conditional, for example
1391 replacing "if x < 4" with "if x <= 3". We can undo that (or
1392 make equivalent types of changes) to get the constants we need
1393 if they're off by one in the right direction. */
1395 if (GET_CODE (target) == CONST_INT)
1397 enum rtx_code code = GET_CODE (if_info->cond);
1398 rtx op_a = XEXP (if_info->cond, 0);
1399 rtx op_b = XEXP (if_info->cond, 1);
1400 rtx prev_insn;
1402 /* First, look to see if we put a constant in a register. */
1403 prev_insn = PREV_INSN (if_info->cond_earliest);
1404 if (prev_insn
1405 && INSN_P (prev_insn)
1406 && GET_CODE (PATTERN (prev_insn)) == SET)
1408 rtx src = find_reg_equal_equiv_note (prev_insn);
1409 if (!src)
1410 src = SET_SRC (PATTERN (prev_insn));
1411 if (GET_CODE (src) == CONST_INT)
1413 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1414 op_a = src;
1415 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1416 op_b = src;
1418 if (GET_CODE (op_a) == CONST_INT)
1420 rtx tmp = op_a;
1421 op_a = op_b;
1422 op_b = tmp;
1423 code = swap_condition (code);
1428 /* Now, look to see if we can get the right constant by
1429 adjusting the conditional. */
1430 if (GET_CODE (op_b) == CONST_INT)
1432 HOST_WIDE_INT desired_val = INTVAL (target);
1433 HOST_WIDE_INT actual_val = INTVAL (op_b);
1435 switch (code)
1437 case LT:
1438 if (actual_val == desired_val + 1)
1440 code = LE;
1441 op_b = GEN_INT (desired_val);
1443 break;
1444 case LE:
1445 if (actual_val == desired_val - 1)
1447 code = LT;
1448 op_b = GEN_INT (desired_val);
1450 break;
1451 case GT:
1452 if (actual_val == desired_val - 1)
1454 code = GE;
1455 op_b = GEN_INT (desired_val);
1457 break;
1458 case GE:
1459 if (actual_val == desired_val + 1)
1461 code = GT;
1462 op_b = GEN_INT (desired_val);
1464 break;
1465 default:
1466 break;
1470 /* If we made any changes, generate a new conditional that is
1471 equivalent to what we started with, but has the right
1472 constants in it. */
1473 if (code != GET_CODE (if_info->cond)
1474 || op_a != XEXP (if_info->cond, 0)
1475 || op_b != XEXP (if_info->cond, 1))
1477 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1478 *earliest = if_info->cond_earliest;
1479 return cond;
1483 cond = canonicalize_condition (if_info->jump, cond, reverse,
1484 earliest, target, false);
1485 if (! cond || ! reg_mentioned_p (target, cond))
1486 return NULL;
1488 /* We almost certainly searched back to a different place.
1489 Need to re-verify correct lifetimes. */
1491 /* X may not be mentioned in the range (cond_earliest, jump]. */
1492 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1493 if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
1494 return NULL;
1496 /* A and B may not be modified in the range [cond_earliest, jump). */
1497 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1498 if (INSN_P (insn)
1499 && (modified_in_p (if_info->a, insn)
1500 || modified_in_p (if_info->b, insn)))
1501 return NULL;
1503 return cond;
1506 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1508 static int
1509 noce_try_minmax (struct noce_if_info *if_info)
1511 rtx cond, earliest, target, seq;
1512 enum rtx_code code, op;
1513 int unsignedp;
1515 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1516 if (no_new_pseudos)
1517 return FALSE;
1519 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1520 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1521 to get the target to tell us... */
1522 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1523 || HONOR_NANS (GET_MODE (if_info->x)))
1524 return FALSE;
1526 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1527 if (!cond)
1528 return FALSE;
1530 /* Verify the condition is of the form we expect, and canonicalize
1531 the comparison code. */
1532 code = GET_CODE (cond);
1533 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1535 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1536 return FALSE;
1538 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1540 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1541 return FALSE;
1542 code = swap_condition (code);
1544 else
1545 return FALSE;
1547 /* Determine what sort of operation this is. Note that the code is for
1548 a taken branch, so the code->operation mapping appears backwards. */
1549 switch (code)
1551 case LT:
1552 case LE:
1553 case UNLT:
1554 case UNLE:
1555 op = SMAX;
1556 unsignedp = 0;
1557 break;
1558 case GT:
1559 case GE:
1560 case UNGT:
1561 case UNGE:
1562 op = SMIN;
1563 unsignedp = 0;
1564 break;
1565 case LTU:
1566 case LEU:
1567 op = UMAX;
1568 unsignedp = 1;
1569 break;
1570 case GTU:
1571 case GEU:
1572 op = UMIN;
1573 unsignedp = 1;
1574 break;
1575 default:
1576 return FALSE;
1579 start_sequence ();
1581 target = expand_simple_binop (GET_MODE (if_info->x), op,
1582 if_info->a, if_info->b,
1583 if_info->x, unsignedp, OPTAB_WIDEN);
1584 if (! target)
1586 end_sequence ();
1587 return FALSE;
1589 if (target != if_info->x)
1590 noce_emit_move_insn (if_info->x, target);
1592 seq = end_ifcvt_sequence (if_info);
1593 if (!seq)
1594 return FALSE;
1596 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1597 if_info->cond = cond;
1598 if_info->cond_earliest = earliest;
1600 return TRUE;
1603 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1605 static int
1606 noce_try_abs (struct noce_if_info *if_info)
1608 rtx cond, earliest, target, seq, a, b, c;
1609 int negate;
1611 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1612 if (no_new_pseudos)
1613 return FALSE;
1615 /* Recognize A and B as constituting an ABS or NABS. */
1616 a = if_info->a;
1617 b = if_info->b;
1618 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1619 negate = 0;
1620 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1622 c = a; a = b; b = c;
1623 negate = 1;
1625 else
1626 return FALSE;
1628 cond = noce_get_alt_condition (if_info, b, &earliest);
1629 if (!cond)
1630 return FALSE;
1632 /* Verify the condition is of the form we expect. */
1633 if (rtx_equal_p (XEXP (cond, 0), b))
1634 c = XEXP (cond, 1);
1635 else if (rtx_equal_p (XEXP (cond, 1), b))
1636 c = XEXP (cond, 0);
1637 else
1638 return FALSE;
1640 /* Verify that C is zero. Search backward through the block for
1641 a REG_EQUAL note if necessary. */
1642 if (REG_P (c))
1644 rtx insn, note = NULL;
1645 for (insn = earliest;
1646 insn != BB_HEAD (if_info->test_bb);
1647 insn = PREV_INSN (insn))
1648 if (INSN_P (insn)
1649 && ((note = find_reg_note (insn, REG_EQUAL, c))
1650 || (note = find_reg_note (insn, REG_EQUIV, c))))
1651 break;
1652 if (! note)
1653 return FALSE;
1654 c = XEXP (note, 0);
1656 if (MEM_P (c)
1657 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1658 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1659 c = get_pool_constant (XEXP (c, 0));
1661 /* Work around funny ideas get_condition has wrt canonicalization.
1662 Note that these rtx constants are known to be CONST_INT, and
1663 therefore imply integer comparisons. */
1664 if (c == constm1_rtx && GET_CODE (cond) == GT)
1666 else if (c == const1_rtx && GET_CODE (cond) == LT)
1668 else if (c != CONST0_RTX (GET_MODE (b)))
1669 return FALSE;
1671 /* Determine what sort of operation this is. */
1672 switch (GET_CODE (cond))
1674 case LT:
1675 case LE:
1676 case UNLT:
1677 case UNLE:
1678 negate = !negate;
1679 break;
1680 case GT:
1681 case GE:
1682 case UNGT:
1683 case UNGE:
1684 break;
1685 default:
1686 return FALSE;
1689 start_sequence ();
1691 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
1693 /* ??? It's a quandary whether cmove would be better here, especially
1694 for integers. Perhaps combine will clean things up. */
1695 if (target && negate)
1696 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1698 if (! target)
1700 end_sequence ();
1701 return FALSE;
1704 if (target != if_info->x)
1705 noce_emit_move_insn (if_info->x, target);
1707 seq = end_ifcvt_sequence (if_info);
1708 if (!seq)
1709 return FALSE;
1711 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1712 if_info->cond = cond;
1713 if_info->cond_earliest = earliest;
1715 return TRUE;
1718 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;". */
1720 static int
1721 noce_try_sign_mask (struct noce_if_info *if_info)
1723 rtx cond, t, m, c, seq;
1724 enum machine_mode mode;
1725 enum rtx_code code;
1727 if (no_new_pseudos)
1728 return FALSE;
1730 cond = if_info->cond;
1731 code = GET_CODE (cond);
1732 m = XEXP (cond, 0);
1733 c = XEXP (cond, 1);
1735 t = NULL_RTX;
1736 if (if_info->a == const0_rtx)
1738 if ((code == LT && c == const0_rtx)
1739 || (code == LE && c == constm1_rtx))
1740 t = if_info->b;
1742 else if (if_info->b == const0_rtx)
1744 if ((code == GE && c == const0_rtx)
1745 || (code == GT && c == constm1_rtx))
1746 t = if_info->a;
1749 if (! t || side_effects_p (t))
1750 return FALSE;
1752 /* We currently don't handle different modes. */
1753 mode = GET_MODE (t);
1754 if (GET_MODE (m) != mode)
1755 return FALSE;
1757 /* This is only profitable if T is cheap, or T is unconditionally
1758 executed/evaluated in the original insn sequence. */
1759 if (rtx_cost (t, SET) >= COSTS_N_INSNS (2)
1760 && (!if_info->b_unconditional
1761 || t != if_info->b))
1762 return FALSE;
1764 start_sequence ();
1765 /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
1766 "(signed) m >> 31" directly. This benefits targets with specialized
1767 insns to obtain the signmask, but still uses ashr_optab otherwise. */
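/* Worked example (illustrative): for a 32-bit M, a negative M makes the
   store-flag result all ones, so the AND below yields T; a non-negative M
   gives a zero mask and X becomes 0 -- exactly the "(m >> 31) & t" idiom
   mentioned above.  */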
1768 m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
1769 t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
1770 : NULL_RTX;
1772 if (!t)
1774 end_sequence ();
1775 return FALSE;
1778 noce_emit_move_insn (if_info->x, t);
1780 seq = end_ifcvt_sequence (if_info);
1781 if (!seq)
1782 return FALSE;
1784 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1785 return TRUE;
1789 /* Similar to get_condition, only the resulting condition must be
1790 valid at JUMP, instead of at EARLIEST. */
1792 static rtx
1793 noce_get_condition (rtx jump, rtx *earliest)
1795 rtx cond, set, tmp, insn;
1796 bool reverse;
1798 if (! any_condjump_p (jump))
1799 return NULL_RTX;
1801 set = pc_set (jump);
1803 /* If this branches to JUMP_LABEL when the condition is false,
1804 reverse the condition. */
1805 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1806 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
1808 /* If the condition variable is a register and is MODE_INT, accept it. */
1810 cond = XEXP (SET_SRC (set), 0);
1811 tmp = XEXP (cond, 0);
1812 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
1814 *earliest = jump;
1816 if (reverse)
1817 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1818 GET_MODE (cond), tmp, XEXP (cond, 1));
1819 return cond;
1822 /* Otherwise, fall back on canonicalize_condition to do the dirty
1823 work of manipulating MODE_CC values and COMPARE rtx codes. */
1825 tmp = canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
1826 false);
1827 if (!tmp)
1828 return NULL_RTX;
1830 /* We are going to insert code before JUMP, not before EARLIEST.
1831 We must therefore be certain that the given condition is valid
1832 at JUMP by virtue of not having been modified since. */
1833 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1834 if (INSN_P (insn) && modified_in_p (tmp, insn))
1835 break;
1836 if (insn == jump)
1837 return tmp;
1839 /* The condition was modified. See if we can get a partial result
1840 that doesn't follow all the reversals. Perhaps combine can fold
1841 them together later. */
1842 tmp = XEXP (tmp, 0);
1843 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
1844 return NULL_RTX;
1845 tmp = canonicalize_condition (jump, cond, reverse, earliest, tmp,
1846 false);
1847 if (!tmp)
1848 return NULL_RTX;
1850 /* For sanity's sake, re-validate the new result. */
1851 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1852 if (INSN_P (insn) && modified_in_p (tmp, insn))
1853 return NULL_RTX;
1855 return tmp;
1858 /* Return true if OP is ok for if-then-else processing. */
1860 static int
1861 noce_operand_ok (rtx op)
1863 /* We special-case memories, so handle any of them with
1864 no address side effects. */
1865 if (MEM_P (op))
1866 return ! side_effects_p (XEXP (op, 0));
1868 if (side_effects_p (op))
1869 return FALSE;
1871 return ! may_trap_p (op);
1874 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1875 without using conditional execution. Return TRUE if we were
1876 successful at converting the block. */
1878 static int
1879 noce_process_if_block (struct ce_if_block * ce_info)
1881 basic_block test_bb = ce_info->test_bb; /* test block */
1882 basic_block then_bb = ce_info->then_bb; /* THEN */
1883 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1884 struct noce_if_info if_info;
1885 rtx insn_a, insn_b;
1886 rtx set_a, set_b;
1887 rtx orig_x, x, a, b;
1888 rtx jump, cond;
1890 /* We're looking for patterns of the form
1892 (1) if (...) x = a; else x = b;
1893 (2) x = b; if (...) x = a;
1894 (3) if (...) x = a; // as if with an initial x = x.
1896 The latter patterns require jumps to be more expensive.
1898 ??? For future expansion, look for multiple X in such patterns. */
1900 /* If test is comprised of && or || elements, don't handle it unless it is
1901 the special case of && elements without an ELSE block. */
1902 if (ce_info->num_multiple_test_blocks)
1904 if (else_bb || ! ce_info->and_and_p)
1905 return FALSE;
1907 ce_info->test_bb = test_bb = ce_info->last_test_bb;
1908 ce_info->num_multiple_test_blocks = 0;
1909 ce_info->num_and_and_blocks = 0;
1910 ce_info->num_or_or_blocks = 0;
1913 /* If this is not a standard conditional jump, we can't parse it. */
1914 jump = BB_END (test_bb);
1915 cond = noce_get_condition (jump, &if_info.cond_earliest);
1916 if (! cond)
1917 return FALSE;
1919 /* If the conditional jump is more than just a conditional
1920 jump, then we can not do if-conversion on this block. */
1921 if (! onlyjump_p (jump))
1922 return FALSE;
1924 /* We must be comparing objects whose modes imply the size. */
1925 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1926 return FALSE;
1928 /* Look for one of the potential sets. */
1929 insn_a = first_active_insn (then_bb);
1930 if (! insn_a
1931 || insn_a != last_active_insn (then_bb, FALSE)
1932 || (set_a = single_set (insn_a)) == NULL_RTX)
1933 return FALSE;
1935 x = SET_DEST (set_a);
1936 a = SET_SRC (set_a);
1938 /* Look for the other potential set. Make sure we've got equivalent
1939 destinations. */
1940 /* ??? This is overconservative. Storing to two different mems is
1941 as easy as conditionally computing the address. Storing to a
1942 single mem merely requires a scratch memory to use as one of the
1943 destination addresses; often the memory immediately below the
1944 stack pointer is available for this. */
1945 set_b = NULL_RTX;
1946 if (else_bb)
1948 insn_b = first_active_insn (else_bb);
1949 if (! insn_b
1950 || insn_b != last_active_insn (else_bb, FALSE)
1951 || (set_b = single_set (insn_b)) == NULL_RTX
1952 || ! rtx_equal_p (x, SET_DEST (set_b)))
1953 return FALSE;
1955 else
1957 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1958 /* We're going to be moving the evaluation of B down from above
1959 COND_EARLIEST to JUMP. Make sure the relevant data is still
1960 intact. */
1961 if (! insn_b
1962 || !NONJUMP_INSN_P (insn_b)
1963 || (set_b = single_set (insn_b)) == NULL_RTX
1964 || ! rtx_equal_p (x, SET_DEST (set_b))
1965 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
1966 || modified_between_p (SET_SRC (set_b),
1967 PREV_INSN (if_info.cond_earliest), jump)
1968 /* Likewise with X. In particular this can happen when
1969 noce_get_condition looks farther back in the instruction
1970 stream than one might expect. */
1971 || reg_overlap_mentioned_p (x, cond)
1972 || reg_overlap_mentioned_p (x, a)
1973 || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
1974 insn_b = set_b = NULL_RTX;
1977 /* If x has side effects then only the if-then-else form is safe to
1978 convert. But even in that case we would need to restore any notes
1979 (such as REG_INC) at the end. That can be tricky if
1980 noce_emit_move_insn expands to more than one insn, so disable the
1981 optimization entirely for now if there are side effects. */
1982 if (side_effects_p (x))
1983 return FALSE;
1985 b = (set_b ? SET_SRC (set_b) : x);
1987 /* Only operate on register destinations, and even then avoid extending
1988 the lifetime of hard registers on small register class machines. */
1989 orig_x = x;
1990 if (!REG_P (x)
1991 || (SMALL_REGISTER_CLASSES
1992 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1994 if (no_new_pseudos || GET_MODE (x) == BLKmode)
1995 return FALSE;
1996 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1997 ? XEXP (x, 0) : x));
2000 /* Don't operate on sources that may trap or are volatile. */
2001 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
2002 return FALSE;
2004 /* Set up the info block for our subroutines. */
2005 if_info.test_bb = test_bb;
2006 if_info.cond = cond;
2007 if_info.jump = jump;
2008 if_info.insn_a = insn_a;
2009 if_info.insn_b = insn_b;
2010 if_info.x = x;
2011 if_info.a = a;
2012 if_info.b = b;
2013 if_info.b_unconditional = else_bb == 0;
2015 /* Try optimizations in some approximation of a useful order. */
2016 /* ??? Should first look to see if X is live incoming at all. If it
2017 isn't, we don't need anything but an unconditional set. */
2019 /* Look and see if A and B are really the same. Avoid creating silly
2020 cmove constructs that no one will fix up later. */
2021 if (rtx_equal_p (a, b))
2023 /* If we have an INSN_B, we don't have to create any new rtl. Just
2024 move the instruction that we already have. If we don't have an
2025 INSN_B, that means that A == X, and we've got a noop move. In
2026 that case don't do anything and let the code below delete INSN_A. */
2027 if (insn_b && else_bb)
2029 rtx note;
2031 if (else_bb && insn_b == BB_END (else_bb))
2032 BB_END (else_bb) = PREV_INSN (insn_b);
2033 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
2035 /* If there was a REG_EQUAL note, delete it since it may have been
2036 true due to this insn being after a jump. */
2037 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
2038 remove_note (insn_b, note);
2040 insn_b = NULL_RTX;
2042 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
2043 x must be executed twice. */
2044 else if (insn_b && side_effects_p (orig_x))
2045 return FALSE;
2047 x = orig_x;
2048 goto success;
2051 /* Disallow the "if (...) x = a;" form (with an implicit "else x = x;")
2052 for most optimizations if writing to x may trap, i.e. it's a memory
2053 other than a static var or a stack slot. */
2054 if (! set_b
2055 && MEM_P (orig_x)
2056 && ! MEM_NOTRAP_P (orig_x)
2057 && rtx_addr_can_trap_p (XEXP (orig_x, 0)))
2059 if (HAVE_conditional_move)
2061 if (noce_try_cmove (&if_info))
2062 goto success;
2063 if (! HAVE_conditional_execution
2064 && noce_try_cmove_arith (&if_info))
2065 goto success;
2067 return FALSE;
2070 if (noce_try_move (&if_info))
2071 goto success;
2072 if (noce_try_store_flag (&if_info))
2073 goto success;
2074 if (noce_try_minmax (&if_info))
2075 goto success;
2076 if (noce_try_abs (&if_info))
2077 goto success;
2078 if (HAVE_conditional_move
2079 && noce_try_cmove (&if_info))
2080 goto success;
2081 if (! HAVE_conditional_execution)
2083 if (noce_try_store_flag_constants (&if_info))
2084 goto success;
2085 if (noce_try_addcc (&if_info))
2086 goto success;
2087 if (noce_try_store_flag_mask (&if_info))
2088 goto success;
2089 if (HAVE_conditional_move
2090 && noce_try_cmove_arith (&if_info))
2091 goto success;
2092 if (noce_try_sign_mask (&if_info))
2093 goto success;
2096 return FALSE;
2098 success:
2099 /* The original sets may now be killed. */
2100 delete_insn (insn_a);
2102 /* Several special cases here: First, we may have reused insn_b above,
2103 in which case insn_b is now NULL. Second, we want to delete insn_b
2104 if it came from the ELSE block, because it follows the now correct
2105 write that appears in the TEST block. However, if we got insn_b from
2106 the TEST block, it may in fact be loading data needed for the comparison.
2107 We'll let life_analysis remove the insn if it's really dead. */
2108 if (insn_b && else_bb)
2109 delete_insn (insn_b);
2111 /* The new insns will have been inserted immediately before the jump. We
2112 should be able to remove the jump with impunity, but the condition itself
2113 may have been modified by gcse to be shared across basic blocks. */
2114 delete_insn (jump);
2116 /* If we used a temporary, fix it up now. */
2117 if (orig_x != x)
2119 start_sequence ();
2120 noce_emit_move_insn (orig_x, x);
2121 insn_b = get_insns ();
2122 set_used_flags (orig_x);
2123 unshare_all_rtl_in_chain (insn_b);
2124 end_sequence ();
2126 emit_insn_after_setloc (insn_b, BB_END (test_bb), INSN_LOCATOR (insn_a));
2129 /* Merge the blocks! */
2130 merge_if_block (ce_info);
2132 return TRUE;
2135 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
2136 straight line code. Return true if successful. */
2138 static int
2139 process_if_block (struct ce_if_block * ce_info)
2141 if (! reload_completed
2142 && noce_process_if_block (ce_info))
2143 return TRUE;
2145 if (HAVE_conditional_execution && reload_completed)
2147 /* If we have && and || tests, first try to combine them into the
2148 conditional code; if that fails, go back and handle the block without
2149 the && and || tests, which at present only handles the && case when
2150 there is no ELSE block. */
2151 if (cond_exec_process_if_block (ce_info, TRUE))
2152 return TRUE;
2154 if (ce_info->num_multiple_test_blocks)
2156 cancel_changes (0);
2158 if (cond_exec_process_if_block (ce_info, FALSE))
2159 return TRUE;
2163 return FALSE;
2166 /* Merge the blocks and mark for local life update. */
2168 static void
2169 merge_if_block (struct ce_if_block * ce_info)
2171 basic_block test_bb = ce_info->test_bb; /* last test block */
2172 basic_block then_bb = ce_info->then_bb; /* THEN */
2173 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
2174 basic_block join_bb = ce_info->join_bb; /* join block */
2175 basic_block combo_bb;
2177 /* All block merging is done into the lower block numbers. */
2179 combo_bb = test_bb;
2181 /* Merge any basic blocks to handle && and || subtests. Each of
2182 the blocks is on the fallthru path from the predecessor block. */
2183 if (ce_info->num_multiple_test_blocks > 0)
2185 basic_block bb = test_bb;
2186 basic_block last_test_bb = ce_info->last_test_bb;
2187 basic_block fallthru = block_fallthru (bb);
2191 bb = fallthru;
2192 fallthru = block_fallthru (bb);
2193 merge_blocks (combo_bb, bb);
2194 num_true_changes++;
2196 while (bb != last_test_bb);
2199 /* Merge TEST block into THEN block. Normally the THEN block won't have a
2200 label, but it might if there were || tests. That label's count should be
2201 zero, and it normally should be removed. */
2203 if (then_bb)
2205 if (combo_bb->global_live_at_end)
2206 COPY_REG_SET (combo_bb->global_live_at_end,
2207 then_bb->global_live_at_end);
2208 merge_blocks (combo_bb, then_bb);
2209 num_true_changes++;
2212 /* The ELSE block, if it existed, had a label. That label count
2213 will almost always be zero, but odd things can happen when labels
2214 get their addresses taken. */
2215 if (else_bb)
2217 merge_blocks (combo_bb, else_bb);
2218 num_true_changes++;
2221 /* If there was no join block reported, that means it was not adjacent
2222 to the others, and so we cannot merge them. */
2224 if (! join_bb)
2226 rtx last = BB_END (combo_bb);
2228 /* The outgoing edge for the current COMBO block should already
2229 be correct. Verify this. */
2230 if (combo_bb->succ == NULL_EDGE)
2232 if (find_reg_note (last, REG_NORETURN, NULL))
2234 else if (NONJUMP_INSN_P (last)
2235 && GET_CODE (PATTERN (last)) == TRAP_IF
2236 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
2238 else
2239 abort ();
2242 /* There should still be something at the end of the THEN or ELSE
2243 blocks taking us to our final destination. */
2244 else if (JUMP_P (last))
2246 else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
2247 && CALL_P (last)
2248 && SIBLING_CALL_P (last))
2250 else if ((combo_bb->succ->flags & EDGE_EH)
2251 && can_throw_internal (last))
2253 else
2254 abort ();
2257 /* The JOIN block may have had quite a number of other predecessors too.
2258 Since we've already merged the TEST, THEN and ELSE blocks, we should
2259 have only one remaining edge from our if-then-else diamond. If there
2260 is more than one remaining edge, it must come from elsewhere. There
2261 may be zero incoming edges if the THEN block didn't actually join
2262 back up (as with a call to abort). */
2263 else if ((join_bb->pred == NULL
2264 || join_bb->pred->pred_next == NULL)
2265 && join_bb != EXIT_BLOCK_PTR)
2267 /* We can merge the JOIN. */
2268 if (combo_bb->global_live_at_end)
2269 COPY_REG_SET (combo_bb->global_live_at_end,
2270 join_bb->global_live_at_end);
2272 merge_blocks (combo_bb, join_bb);
2273 num_true_changes++;
2275 else
2277 /* We cannot merge the JOIN. */
2279 /* The outgoing edge for the current COMBO block should already
2280 be correct. Verify this. */
2281 if (combo_bb->succ->succ_next != NULL_EDGE
2282 || combo_bb->succ->dest != join_bb)
2283 abort ();
2285 /* Remove the jump and cruft from the end of the COMBO block. */
2286 if (join_bb != EXIT_BLOCK_PTR)
2287 tidy_fallthru_edge (combo_bb->succ);
2290 num_updated_if_blocks++;
2293 /* Find a block ending in a simple IF condition and try to transform it
2294 in some way. When converting a multi-block condition, put the new code
2295 in the first such block and delete the rest. Return a pointer to this
2296 first block if some transformation was done. Return NULL otherwise. */
2298 static basic_block
2299 find_if_header (basic_block test_bb, int pass)
2301 ce_if_block_t ce_info;
2302 edge then_edge;
2303 edge else_edge;
2305 /* The kind of block we're looking for has exactly two successors. */
2306 if ((then_edge = test_bb->succ) == NULL_EDGE
2307 || (else_edge = then_edge->succ_next) == NULL_EDGE
2308 || else_edge->succ_next != NULL_EDGE)
2309 return NULL;
2311 /* Neither edge should be abnormal. */
2312 if ((then_edge->flags & EDGE_COMPLEX)
2313 || (else_edge->flags & EDGE_COMPLEX))
2314 return NULL;
2316 /* Nor exit the loop. */
2317 if ((then_edge->flags & EDGE_LOOP_EXIT)
2318 || (else_edge->flags & EDGE_LOOP_EXIT))
2319 return NULL;
2321 /* The THEN edge is canonically the one that falls through. */
2322 if (then_edge->flags & EDGE_FALLTHRU)
2324 else if (else_edge->flags & EDGE_FALLTHRU)
2326 edge e = else_edge;
2327 else_edge = then_edge;
2328 then_edge = e;
2330 else
2331 /* Otherwise this must be a multiway branch of some sort. */
2332 return NULL;
2334 memset (&ce_info, '\0', sizeof (ce_info));
2335 ce_info.test_bb = test_bb;
2336 ce_info.then_bb = then_edge->dest;
2337 ce_info.else_bb = else_edge->dest;
2338 ce_info.pass = pass;
2340 #ifdef IFCVT_INIT_EXTRA_FIELDS
2341 IFCVT_INIT_EXTRA_FIELDS (&ce_info);
2342 #endif
2344 if (find_if_block (&ce_info))
2345 goto success;
2347 if (HAVE_trap && HAVE_conditional_trap
2348 && find_cond_trap (test_bb, then_edge, else_edge))
2349 goto success;
2351 if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY
2352 && (! HAVE_conditional_execution || reload_completed))
2354 if (find_if_case_1 (test_bb, then_edge, else_edge))
2355 goto success;
2356 if (find_if_case_2 (test_bb, then_edge, else_edge))
2357 goto success;
2360 return NULL;
2362 success:
2363 if (dump_file)
2364 fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
2365 return ce_info.test_bb;
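/* To illustrate the && / || grouping handled below: a source condition like
 "if (a && b) x = y;" is emitted as two test blocks. The first tests A and
 either falls through to the block testing B or jumps to the ELSE/JOIN
 block; block_jumps_and_fallthru_p recognizes such a predecessor (exactly
 one fallthru edge plus one jump to the given target), which lets
 find_if_block chain the tests together as multiple test blocks for
 conditional execution. */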
2368 /* Return true if a block has two edges, one of which falls through to the next
2369 block, and the other jumps to a specific block, so that we can tell if the
2370 block is part of an && test or an || test. Returns either -1 or the number
2371 of non-note, non-jump, non-USE/CLOBBER insns in the block. */
2373 static int
2374 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
2376 edge cur_edge;
2377 int fallthru_p = FALSE;
2378 int jump_p = FALSE;
2379 rtx insn;
2380 rtx end;
2381 int n_insns = 0;
2383 if (!cur_bb || !target_bb)
2384 return -1;
2386 /* If no edges, obviously it doesn't jump or fallthru. */
2387 if (cur_bb->succ == NULL_EDGE)
2388 return FALSE;
2390 for (cur_edge = cur_bb->succ;
2391 cur_edge != NULL_EDGE;
2392 cur_edge = cur_edge->succ_next)
2394 if (cur_edge->flags & EDGE_COMPLEX)
2395 /* Anything complex isn't what we want. */
2396 return -1;
2398 else if (cur_edge->flags & EDGE_FALLTHRU)
2399 fallthru_p = TRUE;
2401 else if (cur_edge->dest == target_bb)
2402 jump_p = TRUE;
2404 else
2405 return -1;
2408 if ((jump_p & fallthru_p) == 0)
2409 return -1;
2411 /* Don't allow calls in the block, since this is used to group && and ||
2412 together for conditional execution support. ??? we should support
2413 conditional execution across calls for IA-64 some day, but
2414 for now it makes the code simpler. */
2415 end = BB_END (cur_bb);
2416 insn = BB_HEAD (cur_bb);
2418 while (insn != NULL_RTX)
2420 if (CALL_P (insn))
2421 return -1;
2423 if (INSN_P (insn)
2424 && !JUMP_P (insn)
2425 && GET_CODE (PATTERN (insn)) != USE
2426 && GET_CODE (PATTERN (insn)) != CLOBBER)
2427 n_insns++;
2429 if (insn == end)
2430 break;
2432 insn = NEXT_INSN (insn);
2435 return n_insns;
2438 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
2439 block. If so, we'll try to convert the insns to not require the branch.
2440 Return TRUE if we were successful at converting the block. */
2442 static int
2443 find_if_block (struct ce_if_block * ce_info)
2445 basic_block test_bb = ce_info->test_bb;
2446 basic_block then_bb = ce_info->then_bb;
2447 basic_block else_bb = ce_info->else_bb;
2448 basic_block join_bb = NULL_BLOCK;
2449 edge then_succ = then_bb->succ;
2450 edge else_succ = else_bb->succ;
2451 int then_predecessors;
2452 int else_predecessors;
2453 edge cur_edge;
2454 basic_block next;
2456 ce_info->last_test_bb = test_bb;
2458 /* Discover if any fall through predecessors of the current test basic block
2459 were && tests (which jump to the else block) or || tests (which jump to
2460 the then block). */
2461 if (HAVE_conditional_execution && reload_completed
2462 && test_bb->pred != NULL_EDGE
2463 && test_bb->pred->pred_next == NULL_EDGE
2464 && test_bb->pred->flags == EDGE_FALLTHRU)
2466 basic_block bb = test_bb->pred->src;
2467 basic_block target_bb;
2468 int max_insns = MAX_CONDITIONAL_EXECUTE;
2469 int n_insns;
2471 /* Determine if the preceding block is an && or || block. */
2472 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
2474 ce_info->and_and_p = TRUE;
2475 target_bb = else_bb;
2477 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
2479 ce_info->and_and_p = FALSE;
2480 target_bb = then_bb;
2482 else
2483 target_bb = NULL_BLOCK;
2485 if (target_bb && n_insns <= max_insns)
2487 int total_insns = 0;
2488 int blocks = 0;
2490 ce_info->last_test_bb = test_bb;
2492 /* Found at least one && or || block, look for more. */
2495 ce_info->test_bb = test_bb = bb;
2496 total_insns += n_insns;
2497 blocks++;
2499 if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
2500 break;
2502 bb = bb->pred->src;
2503 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
2505 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
2507 ce_info->num_multiple_test_blocks = blocks;
2508 ce_info->num_multiple_test_insns = total_insns;
2510 if (ce_info->and_and_p)
2511 ce_info->num_and_and_blocks = blocks;
2512 else
2513 ce_info->num_or_or_blocks = blocks;
2517 /* Count the number of edges the THEN and ELSE blocks have. */
2518 then_predecessors = 0;
2519 for (cur_edge = then_bb->pred;
2520 cur_edge != NULL_EDGE;
2521 cur_edge = cur_edge->pred_next)
2523 then_predecessors++;
2524 if (cur_edge->flags & EDGE_COMPLEX)
2525 return FALSE;
2528 else_predecessors = 0;
2529 for (cur_edge = else_bb->pred;
2530 cur_edge != NULL_EDGE;
2531 cur_edge = cur_edge->pred_next)
2533 else_predecessors++;
2534 if (cur_edge->flags & EDGE_COMPLEX)
2535 return FALSE;
2538 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
2539 other than any || blocks which jump to the THEN block. */
2540 if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
2541 return FALSE;
2543 /* The THEN block of an IF-THEN combo must have zero or one successors. */
2544 if (then_succ != NULL_EDGE
2545 && (then_succ->succ_next != NULL_EDGE
2546 || (then_succ->flags & EDGE_COMPLEX)
2547 || (flow2_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
2548 return FALSE;
2550 /* If the THEN block has no successors, conditional execution can still
2551 make a conditional call. Don't do this unless the ELSE block has
2552 only one incoming edge -- the CFG manipulation is too ugly otherwise.
2553 Check for the last insn of the THEN block being an indirect jump, which
2554 is listed as not having any successors, but confuses the rest of the CE
2555 code processing. ??? we should fix this in the future. */
2556 if (then_succ == NULL)
2558 if (else_bb->pred->pred_next == NULL_EDGE)
2560 rtx last_insn = BB_END (then_bb);
2562 while (last_insn
2563 && NOTE_P (last_insn)
2564 && last_insn != BB_HEAD (then_bb))
2565 last_insn = PREV_INSN (last_insn);
2567 if (last_insn
2568 && JUMP_P (last_insn)
2569 && ! simplejump_p (last_insn))
2570 return FALSE;
2572 join_bb = else_bb;
2573 else_bb = NULL_BLOCK;
2575 else
2576 return FALSE;
2579 /* If the THEN block's successor is the other edge out of the TEST block,
2580 then we have an IF-THEN combo without an ELSE. */
2581 else if (then_succ->dest == else_bb)
2583 join_bb = else_bb;
2584 else_bb = NULL_BLOCK;
2587 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
2588 has exactly one predecessor and one successor, and the outgoing edge
2589 is not complex, then we have an IF-THEN-ELSE combo. */
2590 else if (else_succ != NULL_EDGE
2591 && then_succ->dest == else_succ->dest
2592 && else_bb->pred->pred_next == NULL_EDGE
2593 && else_succ->succ_next == NULL_EDGE
2594 && ! (else_succ->flags & EDGE_COMPLEX)
2595 && ! (flow2_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
2596 join_bb = else_succ->dest;
2598 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2599 else
2600 return FALSE;
2602 num_possible_if_blocks++;
2604 if (dump_file)
2606 fprintf (dump_file,
2607 "\nIF-THEN%s block found, pass %d, start block %d "
2608 "[insn %d], then %d [%d]",
2609 (else_bb) ? "-ELSE" : "",
2610 ce_info->pass,
2611 test_bb->index,
2612 BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
2613 then_bb->index,
2614 BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
2616 if (else_bb)
2617 fprintf (dump_file, ", else %d [%d]",
2618 else_bb->index,
2619 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
2621 fprintf (dump_file, ", join %d [%d]",
2622 join_bb->index,
2623 BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
2625 if (ce_info->num_multiple_test_blocks > 0)
2626 fprintf (dump_file, ", %d %s block%s last test %d [%d]",
2627 ce_info->num_multiple_test_blocks,
2628 (ce_info->and_and_p) ? "&&" : "||",
2629 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
2630 ce_info->last_test_bb->index,
2631 ((BB_HEAD (ce_info->last_test_bb))
2632 ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
2633 : -1));
2635 fputc ('\n', dump_file);
2638 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we get the
2639 first condition for free, since we've already asserted that there's a
2640 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
2641 we checked the FALLTHRU flag, those are already adjacent to the last IF
2642 block. */
2643 /* ??? As an enhancement, move the ELSE block. Have to deal with
2644 BLOCK notes, if by no other means than aborting the merge if they
2645 exist. Sticky enough I don't want to think about it now. */
2646 next = then_bb;
2647 if (else_bb && (next = next->next_bb) != else_bb)
2648 return FALSE;
2649 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2651 if (else_bb)
2652 join_bb = NULL;
2653 else
2654 return FALSE;
2657 /* Do the real work. */
2658 ce_info->else_bb = else_bb;
2659 ce_info->join_bb = join_bb;
2661 return process_if_block (ce_info);
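/* As an example of the conversion performed by find_cond_trap below, on a
 target providing a conditional trap pattern a sequence such as

 if (x != 0) goto L;
 trap;
 L:
 ...

 can be collapsed into (roughly) a single "trap if (x == 0)" insn, with the
 branch removed and the now-unreachable trap block deleted. The comparison
 is reversed here because the trap sits on the fallthru (THEN) side. */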
2664 /* Convert a branch over a trap, or a branch
2665 to a trap, into a conditional trap. */
2667 static int
2668 find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
2670 basic_block then_bb = then_edge->dest;
2671 basic_block else_bb = else_edge->dest;
2672 basic_block other_bb, trap_bb;
2673 rtx trap, jump, cond, cond_earliest, seq;
2674 enum rtx_code code;
2676 /* Locate the block with the trap instruction. */
2677 /* ??? While we look for no successors, we really ought to allow
2678 EH successors. Need to fix merge_if_block for that to work. */
2679 if ((trap = block_has_only_trap (then_bb)) != NULL)
2680 trap_bb = then_bb, other_bb = else_bb;
2681 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2682 trap_bb = else_bb, other_bb = then_bb;
2683 else
2684 return FALSE;
2686 if (dump_file)
2688 fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2689 test_bb->index, trap_bb->index);
2692 /* If this is not a standard conditional jump, we can't parse it. */
2693 jump = BB_END (test_bb);
2694 cond = noce_get_condition (jump, &cond_earliest);
2695 if (! cond)
2696 return FALSE;
2698 /* If the conditional jump is more than just a conditional jump, then
2699 we cannot do if-conversion on this block. */
2700 if (! onlyjump_p (jump))
2701 return FALSE;
2703 /* We must be comparing objects whose modes imply the size. */
2704 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2705 return FALSE;
2707 /* Reverse the comparison code, if necessary. */
2708 code = GET_CODE (cond);
2709 if (then_bb == trap_bb)
2711 code = reversed_comparison_code (cond, jump);
2712 if (code == UNKNOWN)
2713 return FALSE;
2716 /* Attempt to generate the conditional trap. */
2717 seq = gen_cond_trap (code, XEXP (cond, 0),
2718 XEXP (cond, 1),
2719 TRAP_CODE (PATTERN (trap)));
2720 if (seq == NULL)
2721 return FALSE;
2723 num_true_changes++;
2725 /* Emit the new insns before cond_earliest. */
2726 emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));
2728 /* Delete the trap block if possible. */
2729 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2730 if (trap_bb->pred == NULL)
2731 delete_basic_block (trap_bb);
2733 /* If the non-trap block and the test are now adjacent, merge them.
2734 Otherwise we must insert a direct branch. */
2735 if (test_bb->next_bb == other_bb)
2737 struct ce_if_block new_ce_info;
2738 delete_insn (jump);
2739 memset (&new_ce_info, '\0', sizeof (new_ce_info));
2740 new_ce_info.test_bb = test_bb;
2741 new_ce_info.then_bb = NULL;
2742 new_ce_info.else_bb = NULL;
2743 new_ce_info.join_bb = other_bb;
2744 merge_if_block (&new_ce_info);
2746 else
2748 rtx lab, newjump;
2750 lab = JUMP_LABEL (jump);
2751 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2752 LABEL_NUSES (lab) += 1;
2753 JUMP_LABEL (newjump) = lab;
2754 emit_barrier_after (newjump);
2756 delete_insn (jump);
2759 return TRUE;
2762 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2763 return it. */
2765 static rtx
2766 block_has_only_trap (basic_block bb)
2768 rtx trap;
2770 /* We're not the exit block. */
2771 if (bb == EXIT_BLOCK_PTR)
2772 return NULL_RTX;
2774 /* The block must have no successors. */
2775 if (bb->succ)
2776 return NULL_RTX;
2778 /* The only instruction in the THEN block must be the trap. */
2779 trap = first_active_insn (bb);
2780 if (! (trap == BB_END (bb)
2781 && GET_CODE (PATTERN (trap)) == TRAP_IF
2782 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2783 return NULL_RTX;
2785 return trap;
2788 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2789 transformable, but not necessarily the other. There need be no
2790 JOIN block.
2792 Return TRUE if we were successful at converting the block.
2794 Cases we'd like to look at:
2796 (1)
2797 if (test) goto over; // x not live
2798 x = a;
2799 goto label;
2800 over:
2802 becomes
2804 x = a;
2805 if (! test) goto label;
2807 (2)
2808 if (test) goto E; // x not live
2809 x = big();
2810 goto L;
2811 E:
2812 x = b;
2813 goto M;
2815 becomes
2817 x = b;
2818 if (test) goto M;
2819 x = big();
2820 goto L;
2822 (3) // This one's really only interesting for targets that can do
2823 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2824 // it results in multiple branches on a cache line, which often
2825 // does not sit well with predictors.
2827 if (test1) goto E; // predicted not taken
2828 x = a;
2829 if (test2) goto F;
2831 E:
2832 x = b;
2835 becomes
2837 x = a;
2838 if (test1) goto E;
2839 if (test2) goto F;
2841 Notes:
2843 (A) Don't do (2) if the branch is predicted against the block we're
2844 eliminating. Do it anyway if we can eliminate a branch; this requires
2845 that the sole successor of the eliminated block postdominate the other
2846 side of the if.
2848 (B) With CE, on (3) we can steal from both sides of the if, creating
2850 if (test1) x = a;
2851 if (!test1) x = b;
2852 if (test1) goto J;
2853 if (test2) goto F;
2857 Again, this is most useful if J postdominates.
2859 (C) CE substitutes for helpful life information.
2861 (D) These heuristics need a lot of work. */
2863 /* Tests for case 1 above. */
2865 static int
2866 find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
2868 basic_block then_bb = then_edge->dest;
2869 basic_block else_bb = else_edge->dest, new_bb;
2870 edge then_succ = then_bb->succ;
2871 int then_bb_index;
2873 /* If we are partitioning hot/cold basic blocks, we don't want to
2874 mess up unconditional or indirect jumps that cross between hot
2875 and cold sections. */
2877 if (flag_reorder_blocks_and_partition
2878 && ((BB_END (then_bb)
2879 && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
2880 || (BB_END (else_bb)
2881 && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
2882 NULL_RTX))))
2883 return FALSE;
2885 /* THEN has one successor. */
2886 if (!then_succ || then_succ->succ_next != NULL)
2887 return FALSE;
2889 /* THEN does not fall through, but is not strange either. */
2890 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2891 return FALSE;
2893 /* THEN has one predecessor. */
2894 if (then_bb->pred->pred_next != NULL)
2895 return FALSE;
2897 /* THEN must do something. */
2898 if (forwarder_block_p (then_bb))
2899 return FALSE;
2901 num_possible_if_blocks++;
2902 if (dump_file)
2903 fprintf (dump_file,
2904 "\nIF-CASE-1 found, start %d, then %d\n",
2905 test_bb->index, then_bb->index);
2907 /* THEN is small. */
2908 if (total_bb_rtx_cost (then_bb) >= COSTS_N_INSNS (BRANCH_COST))
2909 return FALSE;
2911 /* Registers set are dead, or are predicable. */
2912 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2913 then_bb->succ->dest, 1))
2914 return FALSE;
2916 /* Conversion went ok, including moving the insns and fixing up the
2917 jump. Adjust the CFG to match. */
2919 bitmap_operation (test_bb->global_live_at_end,
2920 else_bb->global_live_at_start,
2921 then_bb->global_live_at_end, BITMAP_IOR);
2923 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2924 then_bb_index = then_bb->index;
2925 delete_basic_block (then_bb);
2927 /* Make the rest of the code believe that the newly created block is the THEN_BB
2928 block we removed. */
2929 if (new_bb)
2931 new_bb->index = then_bb_index;
2932 BASIC_BLOCK (then_bb_index) = new_bb;
2934 /* We've possibly created a jump to the next insn; cleanup_cfg will
2935 clean that up later. */
2937 num_true_changes++;
2938 num_updated_if_blocks++;
2940 return TRUE;
2943 /* Test for case 2 above. */
2945 static int
2946 find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
2948 basic_block then_bb = then_edge->dest;
2949 basic_block else_bb = else_edge->dest;
2950 edge else_succ = else_bb->succ;
2951 rtx note;
2953 /* If we are partitioning hot/cold basic blocks, we don't want to
2954 mess up unconditional or indirect jumps that cross between hot
2955 and cold sections. */
2957 if (flag_reorder_blocks_and_partition
2958 && ((BB_END (then_bb)
2959 && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
2960 || (BB_END (else_bb)
2961 && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
2962 NULL_RTX))))
2963 return FALSE;
2965 /* ELSE has one successor. */
2966 if (!else_succ || else_succ->succ_next != NULL)
2967 return FALSE;
2969 /* ELSE outgoing edge is not complex. */
2970 if (else_succ->flags & EDGE_COMPLEX)
2971 return FALSE;
2973 /* ELSE has one predecessor. */
2974 if (else_bb->pred->pred_next != NULL)
2975 return FALSE;
2977 /* THEN is not EXIT. */
2978 if (then_bb->index < 0)
2979 return FALSE;
2981 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2982 note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
2983 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2985 else if (else_succ->dest->index < 0
2986 || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
2987 else_succ->dest))
2989 else
2990 return FALSE;
2992 num_possible_if_blocks++;
2993 if (dump_file)
2994 fprintf (dump_file,
2995 "\nIF-CASE-2 found, start %d, else %d\n",
2996 test_bb->index, else_bb->index);
2998 /* ELSE is small. */
2999 if (total_bb_rtx_cost (else_bb) >= COSTS_N_INSNS (BRANCH_COST))
3000 return FALSE;
3002 /* Registers set are dead, or are predicable. */
3003 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
3004 return FALSE;
3006 /* Conversion went ok, including moving the insns and fixing up the
3007 jump. Adjust the CFG to match. */
3009 bitmap_operation (test_bb->global_live_at_end,
3010 then_bb->global_live_at_start,
3011 else_bb->global_live_at_end, BITMAP_IOR);
3013 delete_basic_block (else_bb);
3015 num_true_changes++;
3016 num_updated_if_blocks++;
3018 /* ??? We may now fallthru from one of THEN's successors into a join
3019 block. Rerun cleanup_cfg? Examine things manually? Wait? */
3021 return TRUE;
3024 /* A subroutine of dead_or_predicable called through for_each_rtx.
3025 Return 1 if a memory is found. */
3027 static int
3028 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3030 return MEM_P (*px);
3033 /* Used by the code above to perform the actual rtl transformations.
3034 Return TRUE if successful.
3036 TEST_BB is the block containing the conditional branch. MERGE_BB
3037 is the block containing the code to manipulate. NEW_DEST is the
3038 label TEST_BB should be branching to after the conversion.
3039 REVERSEP is true if the sense of the branch should be reversed. */
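/* For case (1) in the comment above, for example, find_if_case_1 calls this
 with MERGE_BB being the "x = a; goto label;" block, NEW_DEST the block at
 "label", OTHER_BB the block at "over:", and REVERSEP nonzero: the body is
 moved above the branch and the branch is inverted to "if (! test) goto
 label;", matching the "becomes" form shown there. */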
3041 static int
3042 dead_or_predicable (basic_block test_bb, basic_block merge_bb,
3043 basic_block other_bb, basic_block new_dest, int reversep)
3045 rtx head, end, jump, earliest = NULL_RTX, old_dest, new_label = NULL_RTX;
3047 jump = BB_END (test_bb);
3049 /* Find the extent of the real code in the merge block. */
3050 head = BB_HEAD (merge_bb);
3051 end = BB_END (merge_bb);
3053 if (LABEL_P (head))
3054 head = NEXT_INSN (head);
3055 if (NOTE_P (head))
3057 if (head == end)
3059 head = end = NULL_RTX;
3060 goto no_body;
3062 head = NEXT_INSN (head);
3065 if (JUMP_P (end))
3067 if (head == end)
3069 head = end = NULL_RTX;
3070 goto no_body;
3072 end = PREV_INSN (end);
3075 /* Disable handling dead code by conditional execution if the machine needs
3076 to do anything funny with the tests, etc. */
3077 #ifndef IFCVT_MODIFY_TESTS
3078 if (HAVE_conditional_execution)
3080 /* In the conditional execution case, we have things easy. We know
3081 the condition is reversible. We don't have to check life info
3082 because we're going to conditionally execute the code anyway.
3083 All that's left is making sure the insns involved can actually
3084 be predicated. */
3086 rtx cond, prob_val;
3088 cond = cond_exec_get_condition (jump);
3089 if (! cond)
3090 return FALSE;
3092 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3093 if (prob_val)
3094 prob_val = XEXP (prob_val, 0);
3096 if (reversep)
3098 enum rtx_code rev = reversed_comparison_code (cond, jump);
3099 if (rev == UNKNOWN)
3100 return FALSE;
3101 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
3102 XEXP (cond, 1));
3103 if (prob_val)
3104 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
3107 if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
3108 prob_val, 0))
3109 goto cancel;
3111 earliest = jump;
3113 else
3114 #endif
3116 /* In the non-conditional execution case, we have to verify that there
3117 are no trapping operations, no calls, no references to memory, and
3118 that any registers modified are dead at the branch site. */
3120 rtx insn, cond, prev;
3121 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
3122 regset merge_set, tmp, test_live, test_set;
3123 struct propagate_block_info *pbi;
3124 int i, fail = 0;
3126 /* Check for no calls or trapping operations. */
3127 for (insn = head; ; insn = NEXT_INSN (insn))
3129 if (CALL_P (insn))
3130 return FALSE;
3131 if (INSN_P (insn))
3133 if (may_trap_p (PATTERN (insn)))
3134 return FALSE;
3136 /* ??? Even non-trapping memories such as stack frame
3137 references must be avoided. For stores, we collect
3138 no lifetime info; for reads, we'd have to assert
3139 true_dependence false against every store in the
3140 TEST range. */
3141 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
3142 return FALSE;
3144 if (insn == end)
3145 break;
3148 if (! any_condjump_p (jump))
3149 return FALSE;
3151 /* Find the extent of the conditional. */
3152 cond = noce_get_condition (jump, &earliest);
3153 if (! cond)
3154 return FALSE;
3156 /* Collect:
3157 MERGE_SET = set of registers set in MERGE_BB
3158 TEST_LIVE = set of registers live at EARLIEST
3159 TEST_SET = set of registers set between EARLIEST and the
3160 end of the block. */
3162 tmp = INITIALIZE_REG_SET (tmp_head);
3163 merge_set = INITIALIZE_REG_SET (merge_set_head);
3164 test_live = INITIALIZE_REG_SET (test_live_head);
3165 test_set = INITIALIZE_REG_SET (test_set_head);
3167 /* ??? bb->local_set is only valid during calculate_global_regs_live,
3168 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
3169 since we've already asserted that MERGE_BB is small. */
3170 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
3172 /* For small register class machines, don't lengthen lifetimes of
3173 hard registers before reload. */
3174 if (SMALL_REGISTER_CLASSES && ! reload_completed)
3176 EXECUTE_IF_SET_IN_BITMAP
3177 (merge_set, 0, i,
3179 if (i < FIRST_PSEUDO_REGISTER
3180 && ! fixed_regs[i]
3181 && ! global_regs[i])
3182 fail = 1;
3186 /* For TEST, we're interested in a range of insns, not a whole block.
3187 Moreover, we're interested in the insns live from OTHER_BB. */
3189 COPY_REG_SET (test_live, other_bb->global_live_at_start);
3190 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
3193 for (insn = jump; ; insn = prev)
3195 prev = propagate_one_insn (pbi, insn);
3196 if (insn == earliest)
3197 break;
3200 free_propagate_block_info (pbi);
3202 /* We can perform the transformation if
3203 MERGE_SET & (TEST_SET | TEST_LIVE)
3204 and
3205 TEST_SET & merge_bb->global_live_at_start
3206 are empty. */
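 /* For instance, if the code being moved sets a register that the
 comparison reads (so it appears in TEST_LIVE), hoisting that set
 above the branch would clobber an input of the comparison; and if
 the TEST range sets a register that is live into MERGE_BB, the moved
 code would read the value from before the test rather than after it.
 Either overlap sets FAIL below and cancels the transformation. */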
3208 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
3209 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
3210 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3212 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
3213 BITMAP_AND);
3214 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3216 FREE_REG_SET (tmp);
3217 FREE_REG_SET (merge_set);
3218 FREE_REG_SET (test_live);
3219 FREE_REG_SET (test_set);
3221 if (fail)
3222 return FALSE;
3225 no_body:
3226 /* We don't want to use normal invert_jump or redirect_jump because
3227 we don't want delete_insn called. Also, we want to do our own
3228 change group management. */
3230 old_dest = JUMP_LABEL (jump);
3231 if (other_bb != new_dest)
3233 new_label = block_label (new_dest);
3234 if (reversep
3235 ? ! invert_jump_1 (jump, new_label)
3236 : ! redirect_jump_1 (jump, new_label))
3237 goto cancel;
3240 if (! apply_change_group ())
3241 return FALSE;
3243 if (other_bb != new_dest)
3245 if (old_dest)
3246 LABEL_NUSES (old_dest) -= 1;
3247 if (new_label)
3248 LABEL_NUSES (new_label) += 1;
3249 JUMP_LABEL (jump) = new_label;
3250 if (reversep)
3251 invert_br_probabilities (jump);
3253 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
3254 if (reversep)
3256 gcov_type count, probability;
3257 count = BRANCH_EDGE (test_bb)->count;
3258 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
3259 FALLTHRU_EDGE (test_bb)->count = count;
3260 probability = BRANCH_EDGE (test_bb)->probability;
3261 BRANCH_EDGE (test_bb)->probability
3262 = FALLTHRU_EDGE (test_bb)->probability;
3263 FALLTHRU_EDGE (test_bb)->probability = probability;
3264 update_br_prob_note (test_bb);
3268 /* Move the insns out of MERGE_BB to before the branch. */
3269 if (head != NULL)
3271 if (end == BB_END (merge_bb))
3272 BB_END (merge_bb) = PREV_INSN (head);
3274 if (squeeze_notes (&head, &end))
3275 return TRUE;
3277 reorder_insns (head, end, PREV_INSN (earliest));
3280 /* Remove the jump and edge if we can. */
3281 if (other_bb == new_dest)
3283 delete_insn (jump);
3284 remove_edge (BRANCH_EDGE (test_bb));
3285 /* ??? Can't merge blocks here, as then_bb is still in use.
3286 At minimum, the merge will get done just before bb-reorder. */
3289 return TRUE;
3291 cancel:
3292 cancel_changes (0);
3293 return FALSE;
3296 /* Main entry point for all if-conversion. */
3298 void
3299 if_convert (int x_life_data_ok)
3301 basic_block bb;
3302 int pass;
3304 num_possible_if_blocks = 0;
3305 num_updated_if_blocks = 0;
3306 num_true_changes = 0;
3307 life_data_ok = (x_life_data_ok != 0);
3309 if ((! targetm.cannot_modify_jumps_p ())
3310 && (!flag_reorder_blocks_and_partition || !no_new_pseudos))
3311 mark_loop_exit_edges ();
3313 /* Compute postdominators if we think we'll use them. */
3314 if (HAVE_conditional_execution || life_data_ok)
3315 calculate_dominance_info (CDI_POST_DOMINATORS);
3317 if (life_data_ok)
3318 clear_bb_flags ();
3320 /* Go through each of the basic blocks looking for things to convert. If we
3321 have conditional execution, we make multiple passes to allow us to handle
3322 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
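 /* For example, with conditional execution available, in

 if (a)
 {
 if (b)
 x = 1;
 y = 2;
 }

 the inner IF can be predicated on one pass, after which the outer IF
 becomes a simple IF-THEN block that a later pass can convert in turn;
 cond_exec_changed_p drives the extra iterations of the loop below. */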
3323 pass = 0;
3326 cond_exec_changed_p = FALSE;
3327 pass++;
3329 #ifdef IFCVT_MULTIPLE_DUMPS
3330 if (dump_file && pass > 1)
3331 fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
3332 #endif
3334 FOR_EACH_BB (bb)
3336 basic_block new_bb;
3337 while ((new_bb = find_if_header (bb, pass)))
3338 bb = new_bb;
3341 #ifdef IFCVT_MULTIPLE_DUMPS
3342 if (dump_file && cond_exec_changed_p)
3343 print_rtl_with_bb (dump_file, get_insns ());
3344 #endif
3346 while (cond_exec_changed_p);
3348 #ifdef IFCVT_MULTIPLE_DUMPS
3349 if (dump_file)
3350 fprintf (dump_file, "\n\n========== no more changes\n");
3351 #endif
3353 free_dominance_info (CDI_POST_DOMINATORS);
3355 if (dump_file)
3356 fflush (dump_file);
3358 clear_aux_for_blocks ();
3360 /* Rebuild life info for basic blocks that require it. */
3361 if (num_true_changes && life_data_ok)
3363 /* If we allocated new pseudos, we must resize the array for sched1. */
3364 if (max_regno < max_reg_num ())
3366 max_regno = max_reg_num ();
3367 allocate_reg_info (max_regno, FALSE, FALSE);
3369 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3370 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
3371 | PROP_KILL_DEAD_CODE);
3374 /* Write the final stats. */
3375 if (dump_file && num_possible_if_blocks > 0)
3377 fprintf (dump_file,
3378 "\n%d possible IF blocks searched.\n",
3379 num_possible_if_blocks);
3380 fprintf (dump_file,
3381 "%d IF blocks converted.\n",
3382 num_updated_if_blocks);
3383 fprintf (dump_file,
3384 "%d true changes made.\n\n\n",
3385 num_true_changes);
3388 #ifdef ENABLE_CHECKING
3389 verify_flow_info ();
3390 #endif