/* If-conversion support.
   Copyright (C) 2000-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "rtl.h"
#include "df.h"

#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "except.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "alias.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "output.h"
#include "insn-codes.h"
#include "optabs.h"
#include "diagnostic-core.h"
#include "tm_p.h"
#include "cfgloop.h"
#include "target.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "shrink-wrap.h"
#include "ifcvt.h"

#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif

#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
   + 1)
#endif
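
/* For illustration (numbers hypothetical): a target whose BRANCH_COST is
   3 when optimizing for speed gets a default cap of 4 insns per
   predicated block; the cap is doubled in cond_exec_process_if_block
   when an ELSE block is present.  */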

#ifndef HAVE_cbranchcc4
#define HAVE_cbranchcc4 0
#endif

#define IFCVT_MULTIPLE_DUMPS 1

#define NULL_BLOCK  ((basic_block) NULL)

/* True if after the combine pass.  */
static bool ifcvt_after_combine;

/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;

/* Forward references.  */
static int count_bb_insns (const_basic_block);
static bool cheap_bb_rtx_cost_p (const_basic_block, int, int);
static rtx_insn *first_active_insn (basic_block);
static rtx_insn *last_active_insn (basic_block, int);
static rtx_insn *find_active_insn_before (basic_block, rtx_insn *);
static rtx_insn *find_active_insn_after (basic_block, rtx_insn *);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block *, rtx_insn *, rtx, rtx, int,
				    int);
static rtx cond_exec_get_condition (rtx_insn *);
static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
static int noce_operand_ok (const_rtx);
static void merge_if_block (ce_if_block *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
static int block_jumps_and_fallthru_p (basic_block, basic_block);
static int noce_find_if_block (basic_block, edge, edge, int);
static int cond_exec_find_if_block (ce_if_block *);
static int find_if_case_1 (basic_block, edge, edge);
static int find_if_case_2 (basic_block, edge, edge);
static int dead_or_predicable (basic_block, basic_block, basic_block,
			       edge, int);
static void noce_emit_move_insn (rtx, rtx);
static rtx_insn *block_has_only_trap (basic_block);

/* Count the number of non-jump active insns in BB.  */

static int
count_bb_insns (const_basic_block bb)
{
  int count = 0;
  rtx_insn *insn = BB_HEAD (bb);

  while (1)
    {
      if (active_insn_p (insn) && !JUMP_P (insn))
	count++;

      if (insn == BB_END (bb))
	break;
      insn = NEXT_INSN (insn);
    }

  return count;
}

/* Determine whether the total insn_rtx_cost on non-jump insns in
   basic block BB is less than MAX_COST.  This function returns
   false if the cost of any instruction could not be estimated.

   The cost of the non-jump insns in BB is scaled by REG_BR_PROB_BASE
   as those insns are being speculated.  MAX_COST is scaled with SCALE
   plus a small fudge factor.  */

static bool
cheap_bb_rtx_cost_p (const_basic_block bb, int scale, int max_cost)
{
  int count = 0;
  rtx_insn *insn = BB_HEAD (bb);
  bool speed = optimize_bb_for_speed_p (bb);

  /* Set scale to REG_BR_PROB_BASE to cancel out the identical scaling
     applied to insn_rtx_cost when optimizing for size.  Only do
     this after combine because if-conversion might interfere with
     passes before combine.

     Use optimize_function_for_speed_p instead of the pre-defined
     variable speed to make sure it is set to the same value for all
     basic blocks in one if-conversion transformation.  */
  if (!optimize_function_for_speed_p (cfun) && ifcvt_after_combine)
    scale = REG_BR_PROB_BASE;
  /* Our branch probability/scaling factors are just estimates and don't
     account for cases where we can get speculation for free and other
     secondary benefits.  So we fudge the scale factor to make speculating
     appear a little more profitable when optimizing for performance.  */
  else
    scale += REG_BR_PROB_BASE / 8;

  max_cost *= scale;
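
  /* A sketch of the arithmetic below: each insn contributes
     insn_rtx_cost * REG_BR_PROB_BASE to COUNT, while MAX_COST has just
     been multiplied by SCALE, so the test "count >= max_cost" in the
     loop effectively compares the raw cost sum against
     max_cost * scale / REG_BR_PROB_BASE.  */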

  while (1)
    {
      if (NONJUMP_INSN_P (insn))
	{
	  int cost = insn_rtx_cost (PATTERN (insn), speed) * REG_BR_PROB_BASE;
	  if (cost == 0)
	    return false;

	  /* If this instruction is the load or set of a "stack" register,
	     such as a floating point register on x87, then the cost of
	     speculatively executing this insn may need to include
	     the additional cost of popping its result off of the
	     register stack.  Unfortunately, correctly recognizing and
	     accounting for this additional overhead is tricky, so for
	     now we simply prohibit such speculative execution.  */
#ifdef STACK_REGS
	  {
	    rtx set = single_set (insn);
	    if (set && STACK_REG_P (SET_DEST (set)))
	      return false;
	  }
#endif

	  count += cost;
	  if (count >= max_cost)
	    return false;
	}
      else if (CALL_P (insn))
	return false;

      if (insn == BB_END (bb))
	break;
      insn = NEXT_INSN (insn);
    }

  return true;
}

/* Return the first non-jump active insn in the basic block.  */

static rtx_insn *
first_active_insn (basic_block bb)
{
  rtx_insn *insn = BB_HEAD (bb);

  if (LABEL_P (insn))
    {
      if (insn == BB_END (bb))
	return NULL;
      insn = NEXT_INSN (insn);
    }

  while (NOTE_P (insn) || DEBUG_INSN_P (insn))
    {
      if (insn == BB_END (bb))
	return NULL;
      insn = NEXT_INSN (insn);
    }

  if (JUMP_P (insn))
    return NULL;

  return insn;
}

/* Return the last active (non-jump) insn in the basic block.  */

static rtx_insn *
last_active_insn (basic_block bb, int skip_use_p)
{
  rtx_insn *insn = BB_END (bb);
  rtx_insn *head = BB_HEAD (bb);

  while (NOTE_P (insn)
	 || JUMP_P (insn)
	 || DEBUG_INSN_P (insn)
	 || (skip_use_p
	     && NONJUMP_INSN_P (insn)
	     && GET_CODE (PATTERN (insn)) == USE))
    {
      if (insn == head)
	return NULL;
      insn = PREV_INSN (insn);
    }

  if (LABEL_P (insn))
    return NULL;

  return insn;
}

/* Return the active insn before INSN inside basic block CURR_BB.  */

static rtx_insn *
find_active_insn_before (basic_block curr_bb, rtx_insn *insn)
{
  if (!insn || insn == BB_HEAD (curr_bb))
    return NULL;

  while ((insn = PREV_INSN (insn)) != NULL_RTX)
    {
      if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
	break;

      /* No other active insn all the way to the start of the basic block.  */
      if (insn == BB_HEAD (curr_bb))
	return NULL;
    }

  return insn;
}

/* Return the active insn after INSN inside basic block CURR_BB.  */

static rtx_insn *
find_active_insn_after (basic_block curr_bb, rtx_insn *insn)
{
  if (!insn || insn == BB_END (curr_bb))
    return NULL;

  while ((insn = NEXT_INSN (insn)) != NULL_RTX)
    {
      if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
	break;

      /* No other active insn all the way to the end of the basic block.  */
      if (insn == BB_END (curr_bb))
	return NULL;
    }

  return insn;
}

/* Return the basic block reached by falling through the basic block BB.  */

static basic_block
block_fallthru (basic_block bb)
{
  edge e = find_fallthru_edge (bb->succs);

  return (e) ? e->dest : NULL_BLOCK;
}

/* Return true if RTXs A and B can be safely interchanged.  */

static bool
rtx_interchangeable_p (const_rtx a, const_rtx b)
{
  if (!rtx_equal_p (a, b))
    return false;

  if (GET_CODE (a) != MEM)
    return true;

  /* A dead type-unsafe memory reference is legal, but a live type-unsafe
     memory reference is not.  Interchanging a dead type-unsafe memory
     reference with a live type-safe one creates a live type-unsafe memory
     reference, in other words, it makes the program illegal.
     We check here conservatively whether the two memory references have
     equal memory attributes.  */

  return mem_attrs_eq_p (get_mem_attrs (a), get_mem_attrs (b));
}

/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.  */

static int
cond_exec_process_insns (ce_if_block *ce_info ATTRIBUTE_UNUSED,
					  /* if block information */
			 rtx_insn *start, /* first insn to look at */
			 rtx end,	  /* last insn to look at */
			 rtx test,	  /* conditional execution test */
			 int prob_val,	  /* probability of branch taken */
			 int mod_ok)
{
  int must_be_last = FALSE;
  rtx_insn *insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* dwarf2out can't cope with conditional prologues.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
	return FALSE;

      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
	goto insn_done;

      gcc_assert (NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* dwarf2out can't cope with conditional unwind info.  */
      if (RTX_FRAME_RELATED_P (insn))
	return FALSE;

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
	{
	  /* ??? Ug.  Actually unlinking the thing is problematic,
	     given what we'd have to coordinate with our callers.  */
	  SET_INSN_DELETED (insn);
	  goto insn_done;
	}

      /* Last insn wasn't last?  */
      if (must_be_last)
	return FALSE;

      if (modified_in_p (test, insn))
	{
	  if (!mod_ok)
	    return FALSE;
	  must_be_last = TRUE;
	}

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
	 two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
	{
	  if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
	    return FALSE;

	  xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
			       COND_EXEC_TEST (pattern));
	  pattern = COND_EXEC_CODE (pattern);
	}

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
	 say for example to force a constant integer operand into a temp
	 register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
	return FALSE;
#endif

      validate_change (insn, &PATTERN (insn), pattern, 1);

      if (CALL_P (insn) && prob_val >= 0)
	validate_change (insn, &REG_NOTES (insn),
			 gen_rtx_INT_LIST ((machine_mode) REG_BR_PROB,
					   prob_val, REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
	break;
    }

  return TRUE;
}
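
/* As an illustration (registers and modes hypothetical, not taken from
   this file): predicating "r1 = r2 + r3" on the test
   "(ne (reg:CC cc) (const_int 0))" produces RTL of the form

     (cond_exec (ne (reg:CC cc) (const_int 0))
		(set (reg:SI r1) (plus:SI (reg:SI r2) (reg:SI r3))))

   which is the shape gen_rtx_COND_EXEC builds above.  */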

/* Return the condition for a jump.  Do not do any special processing.  */

static rtx
cond_exec_get_condition (rtx_insn *jump)
{
  rtx test_if, cond;

  if (any_condjump_p (jump))
    test_if = SET_SRC (pc_set (jump));
  else
    return NULL_RTX;
  cond = XEXP (test_if, 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
      && LABEL_REF_LABEL (XEXP (test_if, 2)) == JUMP_LABEL (jump))
    {
      enum rtx_code rev = reversed_comparison_code (cond, jump);
      if (rev == UNKNOWN)
	return NULL_RTX;

      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
			     XEXP (cond, 1));
    }

  return cond;
}

/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_exec_process_if_block (ce_if_block * ce_info, /* if block information */
			    int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;  /* last test block */
  basic_block then_bb = ce_info->then_bb;  /* THEN */
  basic_block else_bb = ce_info->else_bb;  /* ELSE or NULL */
  rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
  rtx_insn *then_start;		/* first insn in THEN block */
  rtx_insn *then_end;		/* last insn + 1 in THEN block */
  rtx_insn *else_start = NULL;	/* first insn in ELSE block or NULL */
  rtx_insn *else_end = NULL;	/* last insn + 1 in ELSE block */
  int max;			/* max # of insns to convert.  */
  int then_mod_ok;		/* whether conditional mods are ok in THEN */
  rtx true_expr;		/* test for else block insns */
  rtx false_expr;		/* test for then block insns */
  int true_prob_val;		/* probability of else block */
  int false_prob_val;		/* probability of then block */
  rtx_insn *then_last_head = NULL;	/* Last match at the head of THEN */
  rtx_insn *else_last_head = NULL;	/* Last match at the head of ELSE */
  rtx_insn *then_first_tail = NULL;	/* First match at the tail of THEN */
  rtx_insn *else_first_tail = NULL;	/* First match at the tail of ELSE */
  int then_n_insns, else_n_insns, n_insns;
  enum rtx_code false_code;
  rtx note;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we cannot do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  then_n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  n_insns = then_n_insns;
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      int n_matching;

      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      else_n_insns = ce_info->num_else_insns = count_bb_insns (else_bb);
      n_insns += else_n_insns;

      /* Look for matching sequences at the head and tail of the two blocks,
	 and limit the range of insns to be converted if possible.  */
      n_matching = flow_find_cross_jump (then_bb, else_bb,
					 &then_first_tail, &else_first_tail,
					 NULL);
      if (then_first_tail == BB_HEAD (then_bb))
	then_start = then_end = NULL;
      if (else_first_tail == BB_HEAD (else_bb))
	else_start = else_end = NULL;

      if (n_matching > 0)
	{
	  if (then_end)
	    then_end = find_active_insn_before (then_bb, then_first_tail);
	  if (else_end)
	    else_end = find_active_insn_before (else_bb, else_first_tail);
	  n_insns -= 2 * n_matching;
	}

      if (then_start
	  && else_start
	  && then_n_insns > n_matching
	  && else_n_insns > n_matching)
	{
	  int longest_match = MIN (then_n_insns - n_matching,
				   else_n_insns - n_matching);
	  n_matching
	    = flow_find_head_matching_sequence (then_bb, else_bb,
						&then_last_head,
						&else_last_head,
						longest_match);

	  if (n_matching > 0)
	    {
	      rtx_insn *insn;

	      /* We won't pass the insns in the head sequence to
		 cond_exec_process_insns, so we need to test them here
		 to make sure that they don't clobber the condition.  */
	      for (insn = BB_HEAD (then_bb);
		   insn != NEXT_INSN (then_last_head);
		   insn = NEXT_INSN (insn))
		if (!LABEL_P (insn) && !NOTE_P (insn)
		    && !DEBUG_INSN_P (insn)
		    && modified_in_p (test_expr, insn))
		  return FALSE;
	    }

	  if (then_last_head == then_end)
	    then_start = then_end = NULL;
	  if (else_last_head == else_end)
	    else_start = else_end = NULL;

	  if (n_matching > 0)
	    {
	      if (then_start)
		then_start = find_active_insn_after (then_bb, then_last_head);
	      if (else_start)
		else_start = find_active_insn_after (else_bb, else_last_head);
	      n_insns -= 2 * n_matching;
	    }
	}
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
				 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note)
    {
      true_prob_val = XINT (note, 0);
      false_prob_val = REG_BR_PROB_BASE - true_prob_val;
    }
  else
    {
      true_prob_val = -1;
      false_prob_val = -1;
    }

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
	goto fail;

      do
	{
	  rtx_insn *start, *end;
	  rtx t, f;
	  enum rtx_code f_code;

	  bb = block_fallthru (bb);
	  start = first_active_insn (bb);
	  end = last_active_insn (bb, TRUE);
	  if (start
	      && ! cond_exec_process_insns (ce_info, start, end, false_expr,
					    false_prob_val, FALSE))
	    goto fail;

	  /* If the conditional jump is more than just a conditional jump,
	     then we cannot do conditional execution conversion on this
	     block.  */
	  if (! onlyjump_p (BB_END (bb)))
	    goto fail;

	  /* Find the conditional jump and isolate the test.  */
	  t = cond_exec_get_condition (BB_END (bb));
	  if (! t)
	    goto fail;

	  f_code = reversed_comparison_code (t, BB_END (bb));
	  if (f_code == UNKNOWN)
	    goto fail;

	  f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
	  if (ce_info->and_and_p)
	    {
	      t = gen_rtx_AND (GET_MODE (t), true_expr, t);
	      f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
	    }
	  else
	    {
	      t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
	      f = gen_rtx_AND (GET_MODE (t), false_expr, f);
	    }

	  /* If the machine description needs to modify the tests, such as
	     setting a conditional execution register from a comparison, it
	     can do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
	  IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

	  /* See if the conversion failed.  */
	  if (!t || !f)
	    goto fail;
#endif

	  true_expr = t;
	  false_expr = f;
	}
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
	  || ! cond_exec_process_insns (ce_info, then_start, then_end,
					false_expr, false_prob_val,
					then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
				    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
	     n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  If we had matching sequences, make sure to delete one
     copy at the appropriate location first: delete the copy in the THEN branch
     for a tail sequence so that the remaining one is executed last for both
     branches, and delete the copy in the ELSE branch for a head sequence so
     that the remaining one is executed first for both branches.  */
  if (then_first_tail)
    {
      rtx_insn *from = then_first_tail;
      if (!INSN_P (from))
	from = find_active_insn_after (then_bb, from);
      delete_insn_chain (from, BB_END (then_bb), false);
    }
  if (else_last_head)
    delete_insn_chain (first_active_insn (else_bb), else_last_head, false);

  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}

/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block.  */
  basic_block test_bb, then_bb, else_bb, join_bb;

  /* The jump that ends TEST_BB.  */
  rtx_insn *jump;

  /* The jump condition.  */
  rtx cond;

  /* New insns should be inserted before this one.  */
  rtx_insn *cond_earliest;

  /* Insns in the THEN and ELSE block.  There is always just one insn
     in each of those blocks.  The insns are single_set insns.
     If there was no ELSE block, INSN_B is the last insn before
     COND_EARLIEST, or NULL_RTX.  In the former case, the insn
     operands are still valid, as if INSN_B was moved down below
     the jump.  */
  rtx_insn *insn_a, *insn_b;

  /* The SET_SRC of INSN_A and INSN_B.  */
  rtx a, b;

  /* The SET_DEST of INSN_A.  */
  rtx x;

  /* True if this if block is not canonical.  In the canonical form of
     if blocks, the THEN_BB is the block reached via the fallthru edge
     from TEST_BB.  For the noce transformations, we allow the symmetric
     form as well.  */
  bool then_else_reversed;

  /* Estimated cost of the particular branch instruction.  */
  int branch_cost;
};
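
/* An illustrative (hypothetical) instance of the shape described above:
   for "if (test) x = a; else x = b;", TEST_BB ends in JUMP conditional
   on COND, THEN_BB holds the single_set INSN_A "x = a", ELSE_BB (when
   present) holds the single_set INSN_B "x = b", and both arms meet in
   JOIN_BB.  */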

static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
static int noce_try_move (struct noce_if_info *);
static int noce_try_store_flag (struct noce_if_info *);
static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
			    rtx, rtx, rtx);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx_insn **);
static int noce_try_minmax (struct noce_if_info *);
static int noce_try_abs (struct noce_if_info *);
static int noce_try_sign_mask (struct noce_if_info *);

/* Helper function for noce_try_store_flag*.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
		      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
		  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    {
      rtx set = pc_set (if_info->jump);
      cond = XEXP (SET_SRC (set), 0);
      if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	  && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2))
	     == JUMP_LABEL (if_info->jump))
	reversep = !reversep;
      if (if_info->then_else_reversed)
	reversep = !reversep;
    }

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx src = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
				XEXP (cond, 1));
      rtx set = gen_rtx_SET (x, src);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      if (recog_memoized (insn) >= 0)
	{
	  rtx_insn *seq = get_insns ();
	  end_sequence ();
	  emit_insn (seq);

	  if_info->cond_earliest = if_info->jump;

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  return emit_store_flag (x, code, XEXP (cond, 0),
			  XEXP (cond, 1), VOIDmode,
			  (code == LTU || code == LEU
			   || code == GEU || code == GTU), normalize);
}

/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  machine_mode outmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      rtx_insn *seq, *insn;
      rtx target;
      optab ot;

      start_sequence ();
      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
	 otherwise construct a suitable SET pattern ourselves.  */
      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
	     ? emit_move_insn (x, y)
	     : emit_insn (gen_rtx_SET (x, y));
      seq = get_insns ();
      end_sequence ();

      if (recog_memoized (insn) <= 0)
	{
	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      rtx op = XEXP (x, 0);
	      unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
	      unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));

	      /* store_bit_field expects START to be relative to
		 BYTES_BIG_ENDIAN and adjusts this value for machines with
		 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
		 invoke store_bit_field again it is necessary to have the START
		 value from the first call.  */
	      if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
		{
		  if (MEM_P (op))
		    start = BITS_PER_UNIT - start - size;
		  else
		    {
		      gcc_assert (REG_P (op));
		      start = BITS_PER_WORD - start - size;
		    }
		}

	      gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
	      store_bit_field (op, size, start, 0, 0, GET_MODE (x), y);
	      return;
	    }

	  switch (GET_RTX_CLASS (GET_CODE (y)))
	    {
	    case RTX_UNARY:
	      ot = code_to_optab (GET_CODE (y));
	      if (ot)
		{
		  start_sequence ();
		  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
		  if (target != NULL_RTX)
		    {
		      if (target != x)
			emit_move_insn (x, target);
		      seq = get_insns ();
		    }
		  end_sequence ();
		}
	      break;

	    case RTX_BIN_ARITH:
	    case RTX_COMM_ARITH:
	      ot = code_to_optab (GET_CODE (y));
	      if (ot)
		{
		  start_sequence ();
		  target = expand_binop (GET_MODE (y), ot,
					 XEXP (y, 0), XEXP (y, 1),
					 x, 0, OPTAB_DIRECT);
		  if (target != NULL_RTX)
		    {
		      if (target != x)
			emit_move_insn (x, target);
		      seq = get_insns ();
		    }
		  end_sequence ();
		}
	      break;

	    default:
	      break;
	    }
	}

      emit_insn (seq);
      return;
    }

  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos,
		   0, 0, outmode, y);
}

/* Return the CC reg if it is used in COND.  */

static rtx
cc_in_cond (rtx cond)
{
  if (HAVE_cbranchcc4 && cond
      && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_CC)
    return XEXP (cond, 0);

  return NULL_RTX;
}

/* Return the sequence of instructions generated by if conversion.  This
   function calls end_sequence() to end the current stream, and ensures
   that the instructions are unshared, recognizable non-jump insns.
   On failure, this function returns NULL_RTX.  */

static rtx_insn *
end_ifcvt_sequence (struct noce_if_info *if_info)
{
  rtx_insn *insn;
  rtx_insn *seq = get_insns ();
  rtx cc = cc_in_cond (if_info->cond);

  set_used_flags (if_info->x);
  set_used_flags (if_info->cond);
  set_used_flags (if_info->a);
  set_used_flags (if_info->b);
  unshare_all_rtl_in_chain (seq);
  end_sequence ();

  /* Make sure that all of the instructions emitted are recognizable,
     and that we haven't introduced a new jump instruction.
     As an exercise for the reader, build a general mechanism that
     allows proper placement of required clobbers.  */
  for (insn = seq; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	|| recog_memoized (insn) == -1
	/* Make sure new generated code does not clobber CC.  */
	|| (cc && set_of (cc, insn)))
      return NULL;

  return seq;
}

/* Convert "if (a != b) x = a; else x = b" into "x = a" and
   "if (a == b) x = a; else x = b" into "x = b".  */

static int
noce_try_move (struct noce_if_info *if_info)
{
  rtx cond = if_info->cond;
  enum rtx_code code = GET_CODE (cond);
  rtx y;
  rtx_insn *seq;

  if (code != NE && code != EQ)
    return FALSE;

  /* This optimization isn't valid if either A or B could be a NaN
     or a signed zero.  */
  if (HONOR_NANS (if_info->x)
      || HONOR_SIGNED_ZEROS (if_info->x))
    return FALSE;

  /* Check whether the operands of the comparison are A and B,
     in either order.  */
  if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
       && rtx_equal_p (if_info->b, XEXP (cond, 1)))
      || (rtx_equal_p (if_info->a, XEXP (cond, 1))
	  && rtx_equal_p (if_info->b, XEXP (cond, 0))))
    {
      if (!rtx_interchangeable_p (if_info->a, if_info->b))
	return FALSE;

      y = (code == EQ) ? if_info->a : if_info->b;

      /* Avoid generating the move if the source is the destination.  */
      if (! rtx_equal_p (if_info->x, y))
	{
	  start_sequence ();
	  noce_emit_move_insn (if_info->x, y);
	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATION (if_info->insn_a));
	}
      return TRUE;
    }
  return FALSE;
}

/* Convert "if (test) x = 1; else x = 0".

   Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
   tried in noce_try_store_flag_constants after noce_try_cmove has had
   a go at the conversion.  */

static int
noce_try_store_flag (struct noce_if_info *if_info)
{
  int reversep;
  rtx target;
  rtx_insn *seq;

  if (CONST_INT_P (if_info->b)
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
      && if_info->a == const0_rtx)
    reversep = 0;
  else if (if_info->b == const0_rtx
	   && CONST_INT_P (if_info->a)
	   && INTVAL (if_info->a) == STORE_FLAG_VALUE
	   && (reversed_comparison_code (if_info->cond, if_info->jump)
	       != UNKNOWN))
    reversep = 1;
  else
    return FALSE;

  start_sequence ();

  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
  if (target)
    {
      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (! seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATION (if_info->insn_a));
      return TRUE;
    }
  else
    {
      end_sequence ();
      return FALSE;
    }
}

/* Convert "if (test) x = a; else x = b", for A and B constant.  */

static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;
  machine_mode mode;

  if (CONST_INT_P (if_info->a)
      && CONST_INT_P (if_info->b))
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);

      diff = (unsigned HOST_WIDE_INT) itrue - ifalse;
      /* Make sure we can represent the difference between the two values.  */
      if ((diff > 0)
	  != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
	return FALSE;

      diff = trunc_int_for_mode (diff, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
		     != UNKNOWN);

      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
	       && (STORE_FLAG_VALUE == 1
		   || if_info->branch_cost >= 2))
	normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
	       && (STORE_FLAG_VALUE == 1 || if_info->branch_cost >= 2))
	normalize = 1, reversep = 1;
      else if (itrue == -1
	       && (STORE_FLAG_VALUE == -1
		   || if_info->branch_cost >= 2))
	normalize = -1;
      else if (ifalse == -1 && can_reverse
	       && (STORE_FLAG_VALUE == -1 || if_info->branch_cost >= 2))
	normalize = -1, reversep = 1;
      else if ((if_info->branch_cost >= 2 && STORE_FLAG_VALUE == -1)
	       || if_info->branch_cost >= 3)
	normalize = -1;
      else
	return FALSE;

      if (reversep)
	{
	  std::swap (itrue, ifalse);
	  diff = trunc_int_for_mode (-(unsigned HOST_WIDE_INT) diff, mode);
	}

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      /* if (test) x = 3; else x = 4;
	 =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	{
	  target = expand_simple_binop (mode,
					(diff == STORE_FLAG_VALUE
					 ? PLUS : MINUS),
					gen_int_mode (ifalse, mode), target,
					if_info->x, 0, OPTAB_WIDEN);
	}

      /* if (test) x = 8; else x = 0;
	 =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
	{
	  target = expand_simple_binop (mode, ASHIFT,
					target, GEN_INT (tmp), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = -1; else x = b;
	 =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
	{
	  target = expand_simple_binop (mode, IOR,
					target, gen_int_mode (ifalse, mode),
					if_info->x, 0, OPTAB_WIDEN);
	}

      /* if (test) x = a; else x = b;
	 =>   x = (-(test != 0) & (b - a)) + a;  */
      else
	{
	  target = expand_simple_binop (mode, AND,
					target, gen_int_mode (diff, mode),
					if_info->x, 0, OPTAB_WIDEN);
	  if (target)
	    target = expand_simple_binop (mode, PLUS,
					  target, gen_int_mode (ifalse, mode),
					  if_info->x, 0, OPTAB_WIDEN);
	}

      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (!seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATION (if_info->insn_a));
      return TRUE;
    }

  return FALSE;
}
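
/* A worked instance of the general case above, with illustrative values:
   ifalse = 5 and itrue = 3 give diff = -2, so (assuming a branch cost
   high enough to take the normalize == -1 path) we emit
   x = (-(flag != 0) & -2) + 5, which evaluates to 3 when the stored
   flag is set and to 5 otherwise.  */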

/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".  */

static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int subtract, normalize;

  if (GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
	  != UNKNOWN))
    {
      rtx cond = if_info->cond;
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
	  && general_operand (XEXP (cond, 1), VOIDmode))
	{
	  start_sequence ();
	  target = emit_conditional_add (if_info->x, code,
					 XEXP (cond, 0),
					 XEXP (cond, 1),
					 VOIDmode,
					 if_info->b,
					 XEXP (if_info->a, 1),
					 GET_MODE (if_info->x),
					 (code == LTU || code == GEU
					  || code == LEU || code == GTU));
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq)
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATION (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}

      /* If that fails, construct conditional increment or decrement using
	 setcc.  */
      if (if_info->branch_cost >= 2
	  && (XEXP (if_info->a, 1) == const1_rtx
	      || XEXP (if_info->a, 1) == constm1_rtx))
	{
	  start_sequence ();
	  if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 0, normalize = 0;
	  else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 1, normalize = 0;
	  else
	    subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));

	  target = noce_emit_store_flag (if_info,
					 gen_reg_rtx (GET_MODE (if_info->x)),
					 1, normalize);

	  if (target)
	    target = expand_simple_binop (GET_MODE (if_info->x),
					  subtract ? MINUS : PLUS,
					  if_info->b, target, if_info->x,
					  0, OPTAB_WIDEN);
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq)
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATION (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}
    }

  return FALSE;
}

/* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */

static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int reversep;

  reversep = 0;
  if ((if_info->branch_cost >= 2
       || STORE_FLAG_VALUE == -1)
      && ((if_info->a == const0_rtx
	   && rtx_equal_p (if_info->b, if_info->x))
	  || ((reversep = (reversed_comparison_code (if_info->cond,
						     if_info->jump)
			   != UNKNOWN))
	      && if_info->b == const0_rtx
	      && rtx_equal_p (if_info->a, if_info->x))))
    {
      start_sequence ();
      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),
				     reversep, -1);
      if (target)
	target = expand_simple_binop (GET_MODE (if_info->x), AND,
				      if_info->x,
				      target, if_info->x, 0,
				      OPTAB_WIDEN);

      if (target)
	{
	  int old_cost, new_cost, insn_cost;
	  int speed_p;

	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
	    return FALSE;

	  speed_p = optimize_bb_for_speed_p (BLOCK_FOR_INSN (if_info->insn_a));
	  insn_cost = insn_rtx_cost (PATTERN (if_info->insn_a), speed_p);
	  old_cost = COSTS_N_INSNS (if_info->branch_cost) + insn_cost;
	  new_cost = seq_cost (seq, speed_p);

	  if (new_cost > old_cost)
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATION (if_info->insn_a));
	  return TRUE;
	}

      end_sequence ();
    }

  return FALSE;
}
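
/* Concretely: the mask -(test == 0) named in the comment above is
   all-ones exactly when the conditional store would have been skipped
   and zero when it would have fired, so the AND either leaves X intact
   or clears it, matching "if (test) x = 0;".  */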

/* Helper function for noce_try_cmove and noce_try_cmove_arith.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
		 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  rtx target ATTRIBUTE_UNUSED;
  int unsignedp ATTRIBUTE_UNUSED;

  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx cond = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      rtx if_then_else = gen_rtx_IF_THEN_ELSE (GET_MODE (x),
					       cond, vtrue, vfalse);
      rtx set = gen_rtx_SET (x, if_then_else);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      if (recog_memoized (insn) >= 0)
	{
	  rtx_insn *seq = get_insns ();
	  end_sequence ();
	  emit_insn (seq);

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird, unless the
     target supports cbranchcc4.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    {
      if (!(HAVE_cbranchcc4)
	  || GET_MODE_CLASS (GET_MODE (cmp_a)) != MODE_CC
	  || cmp_b != const0_rtx)
	return NULL_RTX;
    }

  unsignedp = (code == LTU || code == GEU
	       || code == LEU || code == GTU);

  target = emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
				  vtrue, vfalse, GET_MODE (x),
				  unsignedp);
  if (target)
    return target;

  /* We might be faced with a situation like:

     x = (reg:M TARGET)
     vtrue = (subreg:M (reg:N VTRUE) BYTE)
     vfalse = (subreg:M (reg:N VFALSE) BYTE)

     We can't do a conditional move in mode M, but it's possible that we
     could do a conditional move in mode N instead and take a subreg of
     the result.

     If we can't create new pseudos, though, don't bother.  */
  if (reload_completed)
    return NULL_RTX;

  if (GET_CODE (vtrue) == SUBREG && GET_CODE (vfalse) == SUBREG)
    {
      rtx reg_vtrue = SUBREG_REG (vtrue);
      rtx reg_vfalse = SUBREG_REG (vfalse);
      unsigned int byte_vtrue = SUBREG_BYTE (vtrue);
      unsigned int byte_vfalse = SUBREG_BYTE (vfalse);
      rtx promoted_target;

      if (GET_MODE (reg_vtrue) != GET_MODE (reg_vfalse)
	  || byte_vtrue != byte_vfalse
	  || (SUBREG_PROMOTED_VAR_P (vtrue)
	      != SUBREG_PROMOTED_VAR_P (vfalse))
	  || (SUBREG_PROMOTED_GET (vtrue)
	      != SUBREG_PROMOTED_GET (vfalse)))
	return NULL_RTX;

      promoted_target = gen_reg_rtx (GET_MODE (reg_vtrue));

      target = emit_conditional_move (promoted_target, code, cmp_a, cmp_b,
				      VOIDmode, reg_vtrue, reg_vfalse,
				      GET_MODE (reg_vtrue), unsignedp);
      /* Nope, couldn't do it in that mode either.  */
      if (!target)
	return NULL_RTX;

      target = gen_rtx_SUBREG (GET_MODE (vtrue), promoted_target, byte_vtrue);
      SUBREG_PROMOTED_VAR_P (target) = SUBREG_PROMOTED_VAR_P (vtrue);
      SUBREG_PROMOTED_SET (target, SUBREG_PROMOTED_GET (vtrue));
      emit_move_insn (x, target);
      return x;
    }
  else
    return NULL_RTX;
}

/* Try only simple constants and registers here.  More complex cases
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
   has had a go at it.  */

static int
noce_try_cmove (struct noce_if_info *if_info)
{
  enum rtx_code code;
  rtx target;
  rtx_insn *seq;

  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
    {
      start_sequence ();

      code = GET_CODE (if_info->cond);
      target = noce_emit_cmove (if_info, if_info->x, code,
				XEXP (if_info->cond, 0),
				XEXP (if_info->cond, 1),
				if_info->a, if_info->b);

      if (target)
	{
	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATION (if_info->insn_a));
	  return TRUE;
	}
      else
	{
	  end_sequence ();
	  return FALSE;
	}
    }

  return FALSE;
}

/* Try more complex cases involving conditional_move.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx_insn *insn_a, *insn_b;
  rtx target;
  int is_mem = 0;
  int insn_cost;
  enum rtx_code code;
  rtx_insn *ifcvt_seq;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  /* ??? FIXME: Magic number 5.  */
  if (cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b)
      && if_info->branch_cost >= 5)
    {
      machine_mode address_mode = get_address_mode (a);

      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (address_mode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not trap or fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_or_fault_p (a) || may_trap_or_fault_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
	x = c - d;
	if (test)
	  x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Total insn_rtx_cost should be smaller than branch cost.  Exit
     if insn_rtx_cost can't be estimated.  */
  if (insn_a)
    {
      insn_cost
	= insn_rtx_cost (PATTERN (insn_a),
			 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_a)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
	return FALSE;
    }
  else
    insn_cost = 0;

  if (insn_b)
    {
      insn_cost
	+= insn_rtx_cost (PATTERN (insn_b),
			  optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_b)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
	return FALSE;
    }

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
	reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
	reversep = 1;

      if (reversep)
	{
	  code = reversed_comparison_code (if_info->cond, if_info->jump);
	  std::swap (a, b);
	  std::swap (insn_a, insn_b);
	}
    }

  start_sequence ();

  orig_a = a;
  orig_b = b;

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx_insn *insn;

      if (is_mem)
	{
	  rtx reg = gen_reg_rtx (GET_MODE (a));
	  insn = emit_insn (gen_rtx_SET (reg, a));
	}
      else if (! insn_a)
	goto end_seq_and_fail;
      else
	{
	  a = gen_reg_rtx (GET_MODE (a));
	  rtx_insn *copy_of_a = as_a <rtx_insn *> (copy_rtx (insn_a));
	  rtx set = single_set (copy_of_a);
	  SET_DEST (set) = a;
	  insn = emit_insn (PATTERN (copy_of_a));
	}
      if (recog_memoized (insn) < 0)
	goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx pat;
      rtx_insn *last;
      rtx_insn *new_insn;

      if (is_mem)
	{
	  rtx reg = gen_reg_rtx (GET_MODE (b));
	  pat = gen_rtx_SET (reg, b);
	}
      else if (! insn_b)
	goto end_seq_and_fail;
      else
	{
	  b = gen_reg_rtx (GET_MODE (b));
	  rtx_insn *copy_of_insn_b = as_a <rtx_insn *> (copy_rtx (insn_b));
	  rtx set = single_set (copy_of_insn_b);
	  SET_DEST (set) = b;
	  pat = PATTERN (copy_of_insn_b);
	}

      /* If insn to set up A clobbers any registers B depends on, try to
	 swap insn that sets up A with the one that sets up B.  If even
	 that doesn't help, punt.  */
      last = get_last_insn ();
      if (last && modified_in_p (orig_b, last))
	{
	  new_insn = emit_insn_before (pat, get_insns ());
	  if (modified_in_p (orig_a, new_insn))
	    goto end_seq_and_fail;
	}
      else
	new_insn = emit_insn (pat);

      if (recog_memoized (new_insn) < 0)
	goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
			    XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory (see above), emit the load now.  */
  if (is_mem)
    {
      rtx mem = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
	MEM_VOLATILE_P (mem) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
	set_mem_alias_set (mem, MEM_ALIAS_SET (if_info->a));
      set_mem_align (mem,
		     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
      set_mem_addr_space (mem, MEM_ADDR_SPACE (if_info->a));

      noce_emit_move_insn (if_info->x, mem);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  ifcvt_seq = end_ifcvt_sequence (if_info);
  if (!ifcvt_seq)
    return FALSE;

  emit_insn_before_setloc (ifcvt_seq, if_info->jump,
			   INSN_LOCATION (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}

/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
			rtx_insn **earliest)
{
  rtx cond, set;
  rtx_insn *insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump);
  if (if_info->then_else_reversed)
    reverse = !reverse;

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
	the target can't compare directly against that constant.  For
	this case, we look for a SET immediately before the comparison
	that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
	replacing "if x < 4" with "if x <= 3".  We can undo that (or
	make equivalent types of changes) to get the constants we need
	if they're off by one in the right direction.  */

  if (CONST_INT_P (target))
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx_insn *prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
      if (prev_insn
	  && BLOCK_FOR_INSN (prev_insn)
	     == BLOCK_FOR_INSN (if_info->cond_earliest)
	  && INSN_P (prev_insn)
	  && GET_CODE (PATTERN (prev_insn)) == SET)
	{
	  rtx src = find_reg_equal_equiv_note (prev_insn);
	  if (!src)
	    src = SET_SRC (PATTERN (prev_insn));
	  if (CONST_INT_P (src))
	    {
	      if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
		op_a = src;
	      else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
		op_b = src;

	      if (CONST_INT_P (op_a))
		{
		  std::swap (op_a, op_b);
		  code = swap_condition (code);
		}
	    }
	}

      /* Now, look to see if we can get the right constant by
	 adjusting the conditional.  */
      if (CONST_INT_P (op_b))
	{
	  HOST_WIDE_INT desired_val = INTVAL (target);
	  HOST_WIDE_INT actual_val = INTVAL (op_b);

	  switch (code)
	    {
	    case LT:
	      if (actual_val == desired_val + 1)
		{
		  code = LE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case LE:
	      if (actual_val == desired_val - 1)
		{
		  code = LT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GT:
	      if (actual_val == desired_val - 1)
		{
		  code = GE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GE:
	      if (actual_val == desired_val + 1)
		{
		  code = GT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    default:
	      break;
	    }
	}

      /* If we made any changes, generate a new conditional that is
	 equivalent to what we started with, but has the right
	 constants in it.  */
      if (code != GET_CODE (if_info->cond)
	  || op_a != XEXP (if_info->cond, 0)
	  || op_b != XEXP (if_info->cond, 1))
	{
	  cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
	  *earliest = if_info->cond_earliest;
	  return cond;
	}
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
				 earliest, target, HAVE_cbranchcc4, true);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (modified_in_p (if_info->a, insn)
	    || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
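
/* An illustrative instance of case 2 above: when searching for the
   constant 4 and the jump condition was canonicalized to
   (le x (const_int 3)), the switch rewrites it as (lt x (const_int 4)),
   which mentions the constant we were looking for.  */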

/* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.  */

static int
noce_try_minmax (struct noce_if_info *if_info)
{
  rtx cond, target;
  rtx_insn *earliest, *seq;
  enum rtx_code code, op;
  int unsignedp;

  /* ??? Reject modes with NaNs or signed zeros since we don't know how
     they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
     to get the target to tell us...  */
  if (HONOR_SIGNED_ZEROS (if_info->x)
      || HONOR_NANS (if_info->x))
    return FALSE;

  cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect, and canonicalize
     the comparison code.  */
  code = GET_CODE (cond);
  if (rtx_equal_p (XEXP (cond, 0), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
	return FALSE;
    }
  else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
	return FALSE;
      code = swap_condition (code);
    }
  else
    return FALSE;

  /* Determine what sort of operation this is.  Note that the code is for
     a taken branch, so the code->operation mapping appears backwards.  */
  switch (code)
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      op = SMAX;
      unsignedp = 0;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      op = SMIN;
      unsignedp = 0;
      break;
    case LTU:
    case LEU:
      op = UMAX;
      unsignedp = 1;
      break;
    case GTU:
    case GEU:
      op = UMIN;
      unsignedp = 1;
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_simple_binop (GET_MODE (if_info->x), op,
				if_info->a, if_info->b,
				if_info->x, unsignedp, OPTAB_WIDEN);
  if (! target)
    {
      end_sequence ();
      return FALSE;
    }
  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
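
/* To make the "backwards" mapping above concrete: IF_INFO->COND is the
   branch-taken condition, under which X receives B (note the operand
   order passed to noce_emit_cmove elsewhere in this file).  With cond
   (lt a b), B is selected exactly when a < b, i.e. X ends up with the
   larger value, so LT maps to SMAX rather than SMIN.  */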
2033 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
2034 "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
2035 etc. */
2037 static int
2038 noce_try_abs (struct noce_if_info *if_info)
2040 rtx cond, target, a, b, c;
2041 rtx_insn *earliest, *seq;
2042 int negate;
2043 bool one_cmpl = false;
2045 /* Reject modes with signed zeros. */
2046 if (HONOR_SIGNED_ZEROS (if_info->x))
2047 return FALSE;
2049 /* Recognize A and B as constituting an ABS or NABS. The canonical
2050 form is a branch around the negation, taken when the object is the
2051 first operand of a comparison against 0 that evaluates to true. */
2052 a = if_info->a;
2053 b = if_info->b;
2054 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
2055 negate = 0;
2056 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
2058 std::swap (a, b);
2059 negate = 1;
2061 else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
2063 negate = 0;
2064 one_cmpl = true;
2066 else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
2068 std::swap (a, b);
2069 negate = 1;
2070 one_cmpl = true;
2072 else
2073 return FALSE;
2075 cond = noce_get_alt_condition (if_info, b, &earliest);
2076 if (!cond)
2077 return FALSE;
2079 /* Verify the condition is of the form we expect. */
2080 if (rtx_equal_p (XEXP (cond, 0), b))
2081 c = XEXP (cond, 1);
2082 else if (rtx_equal_p (XEXP (cond, 1), b))
2084 c = XEXP (cond, 0);
2085 negate = !negate;
2087 else
2088 return FALSE;
2090 /* Verify that C is zero. Search one step backward for a
2091 REG_EQUAL note or a simple source if necessary. */
2092 if (REG_P (c))
2094 rtx set;
2095 rtx_insn *insn = prev_nonnote_insn (earliest);
2096 if (insn
2097 && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
2098 && (set = single_set (insn))
2099 && rtx_equal_p (SET_DEST (set), c))
2101 rtx note = find_reg_equal_equiv_note (insn);
2102 if (note)
2103 c = XEXP (note, 0);
2104 else
2105 c = SET_SRC (set);
2107 else
2108 return FALSE;
2110 if (MEM_P (c)
2111 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
2112 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
2113 c = get_pool_constant (XEXP (c, 0));
2115 /* Work around funny ideas get_condition has wrt canonicalization.
2116 Note that these rtx constants are known to be CONST_INT, and
2117 therefore imply integer comparisons. */
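/* E.g. "a >= 0" can come back from get_condition as "a > -1", and
   "a <= 0" as "a < 1"; accept those forms as comparisons against zero. */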
2118 if (c == constm1_rtx && GET_CODE (cond) == GT)
2119 ;
2120 else if (c == const1_rtx && GET_CODE (cond) == LT)
2121 ;
2122 else if (c != CONST0_RTX (GET_MODE (b)))
2123 return FALSE;
2125 /* Determine what sort of operation this is. */
2126 switch (GET_CODE (cond))
2128 case LT:
2129 case LE:
2130 case UNLT:
2131 case UNLE:
2132 negate = !negate;
2133 break;
2134 case GT:
2135 case GE:
2136 case UNGT:
2137 case UNGE:
2138 break;
2139 default:
2140 return FALSE;
2143 start_sequence ();
2144 if (one_cmpl)
2145 target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
2146 if_info->x);
2147 else
2148 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
2150 /* ??? It's a quandary whether cmove would be better here, especially
2151 for integers. Perhaps combine will clean things up. */
2152 if (target && negate)
2154 if (one_cmpl)
2155 target = expand_simple_unop (GET_MODE (target), NOT, target,
2156 if_info->x, 0);
2157 else
2158 target = expand_simple_unop (GET_MODE (target), NEG, target,
2159 if_info->x, 0);
2162 if (! target)
2164 end_sequence ();
2165 return FALSE;
2168 if (target != if_info->x)
2169 noce_emit_move_insn (if_info->x, target);
2171 seq = end_ifcvt_sequence (if_info);
2172 if (!seq)
2173 return FALSE;
2175 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2176 if_info->cond = cond;
2177 if_info->cond_earliest = earliest;
2179 return TRUE;
2182 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;". */
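/* For instance, with a 32-bit M the result is "x = (m >> 31) & b":
   the arithmetic shift produces all ones when M is negative and zero
   otherwise, selecting B or 0 without a branch. */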
2184 static int
2185 noce_try_sign_mask (struct noce_if_info *if_info)
2187 rtx cond, t, m, c;
2188 rtx_insn *seq;
2189 machine_mode mode;
2190 enum rtx_code code;
2191 bool t_unconditional;
2193 cond = if_info->cond;
2194 code = GET_CODE (cond);
2195 m = XEXP (cond, 0);
2196 c = XEXP (cond, 1);
2198 t = NULL_RTX;
2199 if (if_info->a == const0_rtx)
2201 if ((code == LT && c == const0_rtx)
2202 || (code == LE && c == constm1_rtx))
2203 t = if_info->b;
2205 else if (if_info->b == const0_rtx)
2207 if ((code == GE && c == const0_rtx)
2208 || (code == GT && c == constm1_rtx))
2209 t = if_info->a;
2212 if (! t || side_effects_p (t))
2213 return FALSE;
2215 /* We currently don't handle different modes. */
2216 mode = GET_MODE (t);
2217 if (GET_MODE (m) != mode)
2218 return FALSE;
2220 /* This is only profitable if T is unconditionally executed/evaluated in the
2221 original insn sequence or T is cheap. The former happens if B is the
2222 non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
2223 INSN_B which can happen for e.g. conditional stores to memory. For the
2224 cost computation use the block TEST_BB where the evaluation will end up
2225 after the transformation. */
2226 t_unconditional =
2227 (t == if_info->b
2228 && (if_info->insn_b == NULL_RTX
2229 || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
2230 if (!(t_unconditional
2231 || (set_src_cost (t, optimize_bb_for_speed_p (if_info->test_bb))
2232 < COSTS_N_INSNS (2))))
2233 return FALSE;
2235 start_sequence ();
2236 /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
2237 "(signed) m >> 31" directly. This benefits targets with specialized
2238 insns to obtain the signmask, but still uses ashr_optab otherwise. */
2239 m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
2240 t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
2241 : NULL_RTX;
2243 if (!t)
2245 end_sequence ();
2246 return FALSE;
2249 noce_emit_move_insn (if_info->x, t);
2251 seq = end_ifcvt_sequence (if_info);
2252 if (!seq)
2253 return FALSE;
2255 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2256 return TRUE;
2260 /* Optimize away "if (x & C) x |= C" and similar bit manipulation
2261 transformations. */
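/* For example (with C = 4, purely illustrative), "if (x & 4) x |= 4;"
   is deleted outright, while "if (!(x & 4)) x ^= 4;" simplifies to
   "x |= 4;". The individual cases are enumerated below. */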
2263 static int
2264 noce_try_bitop (struct noce_if_info *if_info)
2266 rtx cond, x, a, result;
2267 rtx_insn *seq;
2268 machine_mode mode;
2269 enum rtx_code code;
2270 int bitnum;
2272 x = if_info->x;
2273 cond = if_info->cond;
2274 code = GET_CODE (cond);
2276 /* Check for no else condition. */
2277 if (! rtx_equal_p (x, if_info->b))
2278 return FALSE;
2280 /* Check for a suitable condition. */
2281 if (code != NE && code != EQ)
2282 return FALSE;
2283 if (XEXP (cond, 1) != const0_rtx)
2284 return FALSE;
2285 cond = XEXP (cond, 0);
2287 /* ??? We could also handle AND here. */
2288 if (GET_CODE (cond) == ZERO_EXTRACT)
2290 if (XEXP (cond, 1) != const1_rtx
2291 || !CONST_INT_P (XEXP (cond, 2))
2292 || ! rtx_equal_p (x, XEXP (cond, 0)))
2293 return FALSE;
2294 bitnum = INTVAL (XEXP (cond, 2));
2295 mode = GET_MODE (x);
2296 if (BITS_BIG_ENDIAN)
2297 bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
2298 if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
2299 return FALSE;
2301 else
2302 return FALSE;
2304 a = if_info->a;
2305 if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
2307 /* Check for "if (X & C) x = x op C". */
2308 if (! rtx_equal_p (x, XEXP (a, 0))
2309 || !CONST_INT_P (XEXP (a, 1))
2310 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2311 != (unsigned HOST_WIDE_INT) 1 << bitnum)
2312 return FALSE;
2314 /* if ((x & C) == 0) x |= C; is transformed to x |= C. */
2315 /* if ((x & C) != 0) x |= C; is transformed to nothing. */
2316 if (GET_CODE (a) == IOR)
2317 result = (code == NE) ? a : NULL_RTX;
2318 else if (code == NE)
2320 /* if ((x & C) == 0) x ^= C; is transformed to x |= C. */
2321 result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
2322 result = simplify_gen_binary (IOR, mode, x, result);
2324 else
2326 /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C. */
2327 result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
2328 result = simplify_gen_binary (AND, mode, x, result);
2331 else if (GET_CODE (a) == AND)
2333 /* Check for "if (X & C) x &= ~C". */
2334 if (! rtx_equal_p (x, XEXP (a, 0))
2335 || !CONST_INT_P (XEXP (a, 1))
2336 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2337 != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
2338 return FALSE;
2340 /* if ((x & C) == 0) x &= ~C; is transformed to nothing. */
2341 /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C. */
2342 result = (code == EQ) ? a : NULL_RTX;
2344 else
2345 return FALSE;
2347 if (result)
2349 start_sequence ();
2350 noce_emit_move_insn (x, result);
2351 seq = end_ifcvt_sequence (if_info);
2352 if (!seq)
2353 return FALSE;
2355 emit_insn_before_setloc (seq, if_info->jump,
2356 INSN_LOCATION (if_info->insn_a));
2358 return TRUE;
2362 /* Similar to get_condition, only the resulting condition must be
2363 valid at JUMP, instead of at EARLIEST.
2365 If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
2366 THEN block of the caller, and we have to reverse the condition. */
2368 static rtx
2369 noce_get_condition (rtx_insn *jump, rtx_insn **earliest, bool then_else_reversed)
2371 rtx cond, set, tmp;
2372 bool reverse;
2374 if (! any_condjump_p (jump))
2375 return NULL_RTX;
2377 set = pc_set (jump);
2379 /* If this branches to JUMP_LABEL when the condition is false,
2380 reverse the condition. */
2381 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
2382 && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump));
2384 /* We may have to reverse because the caller's if block is not canonical,
2385 i.e. the THEN block isn't the fallthrough block for the TEST block
2386 (see find_if_header). */
2387 if (then_else_reversed)
2388 reverse = !reverse;
2390 /* If the condition variable is a register and is MODE_INT, accept it. */
2392 cond = XEXP (SET_SRC (set), 0);
2393 tmp = XEXP (cond, 0);
2394 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT
2395 && (GET_MODE (tmp) != BImode
2396 || !targetm.small_register_classes_for_mode_p (BImode)))
2398 *earliest = jump;
2400 if (reverse)
2401 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2402 GET_MODE (cond), tmp, XEXP (cond, 1));
2403 return cond;
2406 /* Otherwise, fall back on canonicalize_condition to do the dirty
2407 work of manipulating MODE_CC values and COMPARE rtx codes. */
2408 tmp = canonicalize_condition (jump, cond, reverse, earliest,
2409 NULL_RTX, HAVE_cbranchcc4, true);
2411 /* We don't handle side-effects in the condition, like handling
2412 REG_INC notes and making sure no duplicate conditions are emitted. */
2413 if (tmp != NULL_RTX && side_effects_p (tmp))
2414 return NULL_RTX;
2416 return tmp;
2419 /* Return true if OP is ok for if-then-else processing. */
2421 static int
2422 noce_operand_ok (const_rtx op)
2424 if (side_effects_p (op))
2425 return FALSE;
2427 /* We special-case memories, so handle any of them with
2428 no address side effects. */
2429 if (MEM_P (op))
2430 return ! side_effects_p (XEXP (op, 0));
2432 return ! may_trap_p (op);
2435 /* Return true if a write into MEM may trap or fault. */
2437 static bool
2438 noce_mem_write_may_trap_or_fault_p (const_rtx mem)
2440 rtx addr;
2442 if (MEM_READONLY_P (mem))
2443 return true;
2445 if (may_trap_or_fault_p (mem))
2446 return true;
2448 addr = XEXP (mem, 0);
2450 /* Call target hook to avoid the effects of -fpic etc.... */
2451 addr = targetm.delegitimize_address (addr);
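/* Walk down the address, stripping constant offsets and auto-modify
   wrappers. A LABEL_REF base, or a SYMBOL_REF whose decl lives in a
   read-only section, means the store will fault; other bases were
   already vetted by may_trap_or_fault_p above. */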
2453 while (addr)
2454 switch (GET_CODE (addr))
2456 case CONST:
2457 case PRE_DEC:
2458 case PRE_INC:
2459 case POST_DEC:
2460 case POST_INC:
2461 case POST_MODIFY:
2462 addr = XEXP (addr, 0);
2463 break;
2464 case LO_SUM:
2465 case PRE_MODIFY:
2466 addr = XEXP (addr, 1);
2467 break;
2468 case PLUS:
2469 if (CONST_INT_P (XEXP (addr, 1)))
2470 addr = XEXP (addr, 0);
2471 else
2472 return false;
2473 break;
2474 case LABEL_REF:
2475 return true;
2476 case SYMBOL_REF:
2477 if (SYMBOL_REF_DECL (addr)
2478 && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
2479 return true;
2480 return false;
2481 default:
2482 return false;
2485 return false;
2488 /* Return whether we can use store speculation for MEM. TOP_BB is the
2489 basic block above the conditional block where we are considering
2490 doing the speculative store. We look for whether MEM is set
2491 unconditionally later in the function. */
2493 static bool
2494 noce_can_store_speculate_p (basic_block top_bb, const_rtx mem)
2496 basic_block dominator;
2498 for (dominator = get_immediate_dominator (CDI_POST_DOMINATORS, top_bb);
2499 dominator != NULL;
2500 dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator))
2502 rtx_insn *insn;
2504 FOR_BB_INSNS (dominator, insn)
2506 /* If we see something that might be a memory barrier, we
2507 have to stop looking. Even if the MEM is set later in
2508 the function, we still don't want to set it
2509 unconditionally before the barrier. */
2510 if (INSN_P (insn)
2511 && (volatile_insn_p (PATTERN (insn))
2512 || (CALL_P (insn) && (!RTL_CONST_CALL_P (insn)))))
2513 return false;
2515 if (memory_must_be_modified_in_insn_p (mem, insn))
2516 return true;
2517 if (modified_in_p (XEXP (mem, 0), insn))
2518 return false;
2523 return false;
2526 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2527 it without using conditional execution. Return TRUE if we were successful
2528 at converting the block. */
2530 static int
2531 noce_process_if_block (struct noce_if_info *if_info)
2533 basic_block test_bb = if_info->test_bb; /* test block */
2534 basic_block then_bb = if_info->then_bb; /* THEN */
2535 basic_block else_bb = if_info->else_bb; /* ELSE or NULL */
2536 basic_block join_bb = if_info->join_bb; /* JOIN */
2537 rtx_insn *jump = if_info->jump;
2538 rtx cond = if_info->cond;
2539 rtx_insn *insn_a, *insn_b;
2540 rtx set_a, set_b;
2541 rtx orig_x, x, a, b;
2542 rtx cc;
2544 /* We're looking for patterns of the form
2546 (1) if (...) x = a; else x = b;
2547 (2) x = b; if (...) x = a;
2548 (3) if (...) x = a; // as if with an initial x = x.
2550 The latter patterns require jumps to be more expensive.
2552 ??? For future expansion, look for multiple X in such patterns. */
2554 /* Look for one of the potential sets. */
2555 insn_a = first_active_insn (then_bb);
2556 if (! insn_a
2557 || insn_a != last_active_insn (then_bb, FALSE)
2558 || (set_a = single_set (insn_a)) == NULL_RTX)
2559 return FALSE;
2561 x = SET_DEST (set_a);
2562 a = SET_SRC (set_a);
2564 /* Look for the other potential set. Make sure we've got equivalent
2565 destinations. */
2566 /* ??? This is overconservative. Storing to two different mems is
2567 as easy as conditionally computing the address. Storing to a
2568 single mem merely requires a scratch memory to use as one of the
2569 destination addresses; often the memory immediately below the
2570 stack pointer is available for this. */
2571 set_b = NULL_RTX;
2572 if (else_bb)
2574 insn_b = first_active_insn (else_bb);
2575 if (! insn_b
2576 || insn_b != last_active_insn (else_bb, FALSE)
2577 || (set_b = single_set (insn_b)) == NULL_RTX
2578 || ! rtx_interchangeable_p (x, SET_DEST (set_b)))
2579 return FALSE;
2581 else
2583 insn_b = prev_nonnote_nondebug_insn (if_info->cond_earliest);
2584 /* We're going to be moving the evaluation of B down from above
2585 COND_EARLIEST to JUMP. Make sure the relevant data is still
2586 intact. */
2587 if (! insn_b
2588 || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
2589 || !NONJUMP_INSN_P (insn_b)
2590 || (set_b = single_set (insn_b)) == NULL_RTX
2591 || ! rtx_interchangeable_p (x, SET_DEST (set_b))
2592 || ! noce_operand_ok (SET_SRC (set_b))
2593 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
2594 || modified_between_p (SET_SRC (set_b), insn_b, jump)
2595 /* Avoid extending the lifetime of hard registers on small
2596 register class machines. */
2597 || (REG_P (SET_SRC (set_b))
2598 && HARD_REGISTER_P (SET_SRC (set_b))
2599 && targetm.small_register_classes_for_mode_p
2600 (GET_MODE (SET_SRC (set_b))))
2601 /* Likewise with X. In particular this can happen when
2602 noce_get_condition looks farther back in the instruction
2603 stream than one might expect. */
2604 || reg_overlap_mentioned_p (x, cond)
2605 || reg_overlap_mentioned_p (x, a)
2606 || modified_between_p (x, insn_b, jump))
2608 insn_b = NULL;
2609 set_b = NULL_RTX;
2613 /* If x has side effects then only the if-then-else form is safe to
2614 convert. But even in that case we would need to restore any notes
2615 (such as REG_INC) at the end. That can be tricky if
2616 noce_emit_move_insn expands to more than one insn, so disable the
2617 optimization entirely for now if there are side effects. */
2618 if (side_effects_p (x))
2619 return FALSE;
2621 b = (set_b ? SET_SRC (set_b) : x);
2623 /* Only operate on register destinations, and even then avoid extending
2624 the lifetime of hard registers on small register class machines. */
2625 orig_x = x;
2626 if (!REG_P (x)
2627 || (HARD_REGISTER_P (x)
2628 && targetm.small_register_classes_for_mode_p (GET_MODE (x))))
2630 if (GET_MODE (x) == BLKmode)
2631 return FALSE;
2633 if (GET_CODE (x) == ZERO_EXTRACT
2634 && (!CONST_INT_P (XEXP (x, 1))
2635 || !CONST_INT_P (XEXP (x, 2))))
2636 return FALSE;
2638 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
2639 ? XEXP (x, 0) : x));
2642 /* Don't operate on sources that may trap or are volatile. */
2643 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
2644 return FALSE;
2646 retry:
2647 /* Set up the info block for our subroutines. */
2648 if_info->insn_a = insn_a;
2649 if_info->insn_b = insn_b;
2650 if_info->x = x;
2651 if_info->a = a;
2652 if_info->b = b;
2654 /* Skip it if the instruction to be moved might clobber CC. */
2655 cc = cc_in_cond (cond);
2656 if (cc
2657 && (set_of (cc, insn_a)
2658 || (insn_b && set_of (cc, insn_b))))
2659 return FALSE;
2661 /* Try optimizations in some approximation of a useful order. */
2662 /* ??? Should first look to see if X is live incoming at all. If it
2663 isn't, we don't need anything but an unconditional set. */
2665 /* Look and see if A and B are really the same. Avoid creating silly
2666 cmove constructs that no one will fix up later. */
2667 if (rtx_interchangeable_p (a, b))
2669 /* If we have an INSN_B, we don't have to create any new rtl. Just
2670 move the instruction that we already have. If we don't have an
2671 INSN_B, that means that A == X, and we've got a noop move. In
2672 that case don't do anything and let the code below delete INSN_A. */
2673 if (insn_b && else_bb)
2675 rtx note;
2677 if (else_bb && insn_b == BB_END (else_bb))
2678 BB_END (else_bb) = PREV_INSN (insn_b);
2679 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
2681 /* If there was a REG_EQUAL note, delete it since it may have been
2682 true due to this insn being after a jump. */
2683 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
2684 remove_note (insn_b, note);
2686 insn_b = NULL;
2688 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
2689 x must be executed twice. */
2690 else if (insn_b && side_effects_p (orig_x))
2691 return FALSE;
2693 x = orig_x;
2694 goto success;
2697 if (!set_b && MEM_P (orig_x))
2699 /* Disallow the "if (...) x = a;" form (implicit "else x = x;")
2700 for optimizations if writing to x may trap or fault,
2701 i.e. it's a memory other than a static var or a stack slot,
2702 is misaligned on strict aligned machines or is read-only. If
2703 x is a read-only memory, then the program is valid only if we
2704 avoid the store into it. If there are stores on both the
2705 THEN and ELSE arms, then we can go ahead with the conversion;
2706 either the program is broken, or the condition is always
2707 false such that the other memory is selected. */
2708 if (noce_mem_write_may_trap_or_fault_p (orig_x))
2709 return FALSE;
2711 /* Avoid store speculation: given "if (...) x = a" where x is a
2712 MEM, we only want to do the store if x is always set
2713 somewhere in the function. This avoids cases like
2714 if (pthread_mutex_trylock(mutex))
2715 ++global_variable;
2716 where we only want global_variable to be changed if the mutex
2717 is held. FIXME: This should ideally be expressed directly in
2718 RTL somehow. */
2719 if (!noce_can_store_speculate_p (test_bb, orig_x))
2720 return FALSE;
2723 if (noce_try_move (if_info))
2724 goto success;
2725 if (noce_try_store_flag (if_info))
2726 goto success;
2727 if (noce_try_bitop (if_info))
2728 goto success;
2729 if (noce_try_minmax (if_info))
2730 goto success;
2731 if (noce_try_abs (if_info))
2732 goto success;
2733 if (HAVE_conditional_move
2734 && noce_try_cmove (if_info))
2735 goto success;
2736 if (! targetm.have_conditional_execution ())
2738 if (noce_try_store_flag_constants (if_info))
2739 goto success;
2740 if (noce_try_addcc (if_info))
2741 goto success;
2742 if (noce_try_store_flag_mask (if_info))
2743 goto success;
2744 if (HAVE_conditional_move
2745 && noce_try_cmove_arith (if_info))
2746 goto success;
2747 if (noce_try_sign_mask (if_info))
2748 goto success;
2751 if (!else_bb && set_b)
2753 insn_b = NULL;
2754 set_b = NULL_RTX;
2755 b = orig_x;
2756 goto retry;
2759 return FALSE;
2761 success:
2763 /* If we used a temporary, fix it up now. */
2764 if (orig_x != x)
2766 rtx_insn *seq;
2768 start_sequence ();
2769 noce_emit_move_insn (orig_x, x);
2770 seq = get_insns ();
2771 set_used_flags (orig_x);
2772 unshare_all_rtl_in_chain (seq);
2773 end_sequence ();
2775 emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATION (insn_a));
2778 /* The original THEN and ELSE blocks may now be removed. The test block
2779 must now jump to the join block. If the test block and the join block
2780 can be merged, do so. */
2781 if (else_bb)
2783 delete_basic_block (else_bb);
2784 num_true_changes++;
2786 else
2787 remove_edge (find_edge (test_bb, join_bb));
2789 remove_edge (find_edge (then_bb, join_bb));
2790 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
2791 delete_basic_block (then_bb);
2792 num_true_changes++;
2794 if (can_merge_blocks_p (test_bb, join_bb))
2796 merge_blocks (test_bb, join_bb);
2797 num_true_changes++;
2800 num_updated_if_blocks++;
2801 return TRUE;
2804 /* Check whether a block is suitable for conditional move conversion.
2805 Every insn must be a simple set of a register to a constant or a
2806 register. For each assignment, store the value in the pointer map
2807 VALS, keyed by register pointer, then store the register
2808 pointer in REGS. COND is the condition we will test. */
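/* For instance (with illustrative pseudo registers), a block containing
   "r1 = 5; r2 = r3;" is acceptable, whereas one containing "r1 = r2 + 1;"
   is not, since that source is neither a constant nor a register. */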
2810 static int
2811 check_cond_move_block (basic_block bb,
2812 hash_map<rtx, rtx> *vals,
2813 vec<rtx> *regs,
2814 rtx cond)
2816 rtx_insn *insn;
2817 rtx cc = cc_in_cond (cond);
2819 /* We can only handle simple jumps at the end of the basic block.
2820 It is almost impossible to update the CFG otherwise. */
2821 insn = BB_END (bb);
2822 if (JUMP_P (insn) && !onlyjump_p (insn))
2823 return FALSE;
2825 FOR_BB_INSNS (bb, insn)
2827 rtx set, dest, src;
2829 if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
2830 continue;
2831 set = single_set (insn);
2832 if (!set)
2833 return FALSE;
2835 dest = SET_DEST (set);
2836 src = SET_SRC (set);
2837 if (!REG_P (dest)
2838 || (HARD_REGISTER_P (dest)
2839 && targetm.small_register_classes_for_mode_p (GET_MODE (dest))))
2840 return FALSE;
2842 if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
2843 return FALSE;
2845 if (side_effects_p (src) || side_effects_p (dest))
2846 return FALSE;
2848 if (may_trap_p (src) || may_trap_p (dest))
2849 return FALSE;
2851 /* Don't try to handle this if the source register was
2852 modified earlier in the block. */
2853 if ((REG_P (src)
2854 && vals->get (src))
2855 || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
2856 && vals->get (SUBREG_REG (src))))
2857 return FALSE;
2859 /* Don't try to handle this if the destination register was
2860 modified earlier in the block. */
2861 if (vals->get (dest))
2862 return FALSE;
2864 /* Don't try to handle this if the condition uses the
2865 destination register. */
2866 if (reg_overlap_mentioned_p (dest, cond))
2867 return FALSE;
2869 /* Don't try to handle this if the source register is modified
2870 later in the block. */
2871 if (!CONSTANT_P (src)
2872 && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
2873 return FALSE;
2875 /* Skip it if the instruction to be moved might clobber CC. */
2876 if (cc && set_of (cc, insn))
2877 return FALSE;
2879 vals->put (dest, src);
2881 regs->safe_push (dest);
2884 return TRUE;
2887 /* Given a basic block BB suitable for conditional move conversion,
2888 a condition COND, and pointer maps THEN_VALS and ELSE_VALS containing
2889 the register values depending on COND, emit the insns in the block as
2890 conditional moves. If ELSE_BLOCK is true, THEN_BB was already
2891 processed. The caller has started a sequence for the conversion.
2892 Return true if successful, false if something goes wrong. */
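/* For instance, if the THEN block sets both X and Y but the ELSE block
   sets only X, the missing ELSE value for Y defaults to Y itself, so the
   emitted conditional move leaves Y unchanged on the ELSE path. */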
2894 static bool
2895 cond_move_convert_if_block (struct noce_if_info *if_infop,
2896 basic_block bb, rtx cond,
2897 hash_map<rtx, rtx> *then_vals,
2898 hash_map<rtx, rtx> *else_vals,
2899 bool else_block_p)
2901 enum rtx_code code;
2902 rtx_insn *insn;
2903 rtx cond_arg0, cond_arg1;
2905 code = GET_CODE (cond);
2906 cond_arg0 = XEXP (cond, 0);
2907 cond_arg1 = XEXP (cond, 1);
2909 FOR_BB_INSNS (bb, insn)
2911 rtx set, target, dest, t, e;
2913 /* ??? Maybe emit conditional debug insn? */
2914 if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
2915 continue;
2916 set = single_set (insn);
2917 gcc_assert (set && REG_P (SET_DEST (set)));
2919 dest = SET_DEST (set);
2921 rtx *then_slot = then_vals->get (dest);
2922 rtx *else_slot = else_vals->get (dest);
2923 t = then_slot ? *then_slot : NULL_RTX;
2924 e = else_slot ? *else_slot : NULL_RTX;
2926 if (else_block_p)
2928 /* If this register was set in the then block, we already
2929 handled this case there. */
2930 if (t)
2931 continue;
2932 t = dest;
2933 gcc_assert (e);
2935 else
2937 gcc_assert (t);
2938 if (!e)
2939 e = dest;
2942 target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
2943 t, e);
2944 if (!target)
2945 return false;
2947 if (target != dest)
2948 noce_emit_move_insn (dest, target);
2951 return true;
2954 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2955 it using only conditional moves. Return TRUE if we were successful at
2956 converting the block. */
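/* E.g. "if (c) { x = 1; y = 2; } else x = 3;" becomes, in effect,
   "x = c ? 1 : 3; y = c ? 2 : y;" with no branch remaining. */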
2958 static int
2959 cond_move_process_if_block (struct noce_if_info *if_info)
2961 basic_block test_bb = if_info->test_bb;
2962 basic_block then_bb = if_info->then_bb;
2963 basic_block else_bb = if_info->else_bb;
2964 basic_block join_bb = if_info->join_bb;
2965 rtx_insn *jump = if_info->jump;
2966 rtx cond = if_info->cond;
2967 rtx_insn *seq, *loc_insn;
2968 rtx reg;
2969 int c;
2970 vec<rtx> then_regs = vNULL;
2971 vec<rtx> else_regs = vNULL;
2972 unsigned int i;
2973 int success_p = FALSE;
2975 /* Build a mapping for each block to the value used for each
2976 register. */
2977 hash_map<rtx, rtx> then_vals;
2978 hash_map<rtx, rtx> else_vals;
2980 /* Make sure the blocks are suitable. */
2981 if (!check_cond_move_block (then_bb, &then_vals, &then_regs, cond)
2982 || (else_bb
2983 && !check_cond_move_block (else_bb, &else_vals, &else_regs, cond)))
2984 goto done;
2986 /* Make sure the blocks can be used together. If the same register
2987 is set in both blocks, and is not set to a constant in both
2988 cases, then both blocks must set it to the same register. We
2989 have already verified that if it is set to a register, that the
2990 source register does not change after the assignment. Also count
2991 the number of registers set in only one of the blocks. */
2992 c = 0;
2993 FOR_EACH_VEC_ELT (then_regs, i, reg)
2995 rtx *then_slot = then_vals.get (reg);
2996 rtx *else_slot = else_vals.get (reg);
2998 gcc_checking_assert (then_slot);
2999 if (!else_slot)
3000 ++c;
3001 else
3003 rtx then_val = *then_slot;
3004 rtx else_val = *else_slot;
3005 if (!CONSTANT_P (then_val) && !CONSTANT_P (else_val)
3006 && !rtx_equal_p (then_val, else_val))
3007 goto done;
3011 /* Finish off c for MAX_CONDITIONAL_EXECUTE. */
3012 FOR_EACH_VEC_ELT (else_regs, i, reg)
3014 gcc_checking_assert (else_vals.get (reg));
3015 if (!then_vals.get (reg))
3016 ++c;
3019 /* Make sure it is reasonable to convert this block. What matters
3020 is the number of assignments currently made in only one of the
3021 branches, since if we convert we are going to always execute
3022 them. */
3023 if (c > MAX_CONDITIONAL_EXECUTE)
3024 goto done;
3026 /* Try to emit the conditional moves. First do the then block,
3027 then do anything left in the else blocks. */
3028 start_sequence ();
3029 if (!cond_move_convert_if_block (if_info, then_bb, cond,
3030 &then_vals, &else_vals, false)
3031 || (else_bb
3032 && !cond_move_convert_if_block (if_info, else_bb, cond,
3033 &then_vals, &else_vals, true)))
3035 end_sequence ();
3036 goto done;
3038 seq = end_ifcvt_sequence (if_info);
3039 if (!seq)
3040 goto done;
3042 loc_insn = first_active_insn (then_bb);
3043 if (!loc_insn)
3045 loc_insn = first_active_insn (else_bb);
3046 gcc_assert (loc_insn);
3048 emit_insn_before_setloc (seq, jump, INSN_LOCATION (loc_insn));
3050 if (else_bb)
3052 delete_basic_block (else_bb);
3053 num_true_changes++;
3055 else
3056 remove_edge (find_edge (test_bb, join_bb));
3058 remove_edge (find_edge (then_bb, join_bb));
3059 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
3060 delete_basic_block (then_bb);
3061 num_true_changes++;
3063 if (can_merge_blocks_p (test_bb, join_bb))
3065 merge_blocks (test_bb, join_bb);
3066 num_true_changes++;
3069 num_updated_if_blocks++;
3071 success_p = TRUE;
3073 done:
3074 then_regs.release ();
3075 else_regs.release ();
3076 return success_p;
3080 /* Determine if a given basic block heads a simple IF-THEN-JOIN or an
3081 IF-THEN-ELSE-JOIN block.
3083 If so, we'll try to convert the insns to not require the branch,
3084 using only transformations that do not require conditional execution.
3086 Return TRUE if we were successful at converting the block. */
3088 static int
3089 noce_find_if_block (basic_block test_bb, edge then_edge, edge else_edge,
3090 int pass)
3092 basic_block then_bb, else_bb, join_bb;
3093 bool then_else_reversed = false;
3094 rtx_insn *jump;
3095 rtx cond;
3096 rtx_insn *cond_earliest;
3097 struct noce_if_info if_info;
3099 /* We only ever should get here before reload. */
3100 gcc_assert (!reload_completed);
3102 /* Recognize an IF-THEN-ELSE-JOIN block. */
3103 if (single_pred_p (then_edge->dest)
3104 && single_succ_p (then_edge->dest)
3105 && single_pred_p (else_edge->dest)
3106 && single_succ_p (else_edge->dest)
3107 && single_succ (then_edge->dest) == single_succ (else_edge->dest))
3109 then_bb = then_edge->dest;
3110 else_bb = else_edge->dest;
3111 join_bb = single_succ (then_bb);
3113 /* Recognize an IF-THEN-JOIN block. */
3114 else if (single_pred_p (then_edge->dest)
3115 && single_succ_p (then_edge->dest)
3116 && single_succ (then_edge->dest) == else_edge->dest)
3118 then_bb = then_edge->dest;
3119 else_bb = NULL_BLOCK;
3120 join_bb = else_edge->dest;
3122 /* Recognize an IF-ELSE-JOIN block. We can have those because the order
3123 of basic blocks in cfglayout mode does not matter, so the fallthrough
3124 edge can go to any basic block (and not just to bb->next_bb, like in
3125 cfgrtl mode). */
3126 else if (single_pred_p (else_edge->dest)
3127 && single_succ_p (else_edge->dest)
3128 && single_succ (else_edge->dest) == then_edge->dest)
3130 /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
3131 To make this work, we have to invert the THEN and ELSE blocks
3132 and reverse the jump condition. */
3133 then_bb = else_edge->dest;
3134 else_bb = NULL_BLOCK;
3135 join_bb = single_succ (then_bb);
3136 then_else_reversed = true;
3138 else
3139 /* Not a form we can handle. */
3140 return FALSE;
3142 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
3143 if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
3144 return FALSE;
3145 if (else_bb
3146 && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
3147 return FALSE;
3149 num_possible_if_blocks++;
3151 if (dump_file)
3153 fprintf (dump_file,
3154 "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
3155 (else_bb) ? "-ELSE" : "",
3156 pass, test_bb->index, then_bb->index);
3158 if (else_bb)
3159 fprintf (dump_file, ", else %d", else_bb->index);
3161 fprintf (dump_file, ", join %d\n", join_bb->index);
3164 /* If the conditional jump is more than just a conditional
3165 jump, then we cannot do if-conversion on this block. */
3166 jump = BB_END (test_bb);
3167 if (! onlyjump_p (jump))
3168 return FALSE;
3170 /* If this is not a standard conditional jump, we can't parse it. */
3171 cond = noce_get_condition (jump, &cond_earliest, then_else_reversed);
3172 if (!cond)
3173 return FALSE;
3175 /* We must be comparing objects whose modes imply the size. */
3176 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
3177 return FALSE;
3179 /* Initialize an IF_INFO struct to pass around. */
3180 memset (&if_info, 0, sizeof if_info);
3181 if_info.test_bb = test_bb;
3182 if_info.then_bb = then_bb;
3183 if_info.else_bb = else_bb;
3184 if_info.join_bb = join_bb;
3185 if_info.cond = cond;
3186 if_info.cond_earliest = cond_earliest;
3187 if_info.jump = jump;
3188 if_info.then_else_reversed = then_else_reversed;
3189 if_info.branch_cost = BRANCH_COST (optimize_bb_for_speed_p (test_bb),
3190 predictable_edge_p (then_edge));
3192 /* Do the real work. */
3194 if (noce_process_if_block (&if_info))
3195 return TRUE;
3197 if (HAVE_conditional_move
3198 && cond_move_process_if_block (&if_info))
3199 return TRUE;
3201 return FALSE;
3205 /* Merge the blocks and mark for local life update. */
3207 static void
3208 merge_if_block (struct ce_if_block * ce_info)
3210 basic_block test_bb = ce_info->test_bb; /* last test block */
3211 basic_block then_bb = ce_info->then_bb; /* THEN */
3212 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
3213 basic_block join_bb = ce_info->join_bb; /* join block */
3214 basic_block combo_bb;
3216 /* All block merging is done into the lower block numbers. */
3218 combo_bb = test_bb;
3219 df_set_bb_dirty (test_bb);
3221 /* Merge any basic blocks to handle && and || subtests. Each of
3222 the blocks are on the fallthru path from the predecessor block. */
3223 if (ce_info->num_multiple_test_blocks > 0)
3225 basic_block bb = test_bb;
3226 basic_block last_test_bb = ce_info->last_test_bb;
3227 basic_block fallthru = block_fallthru (bb);
3229 do
3231 bb = fallthru;
3232 fallthru = block_fallthru (bb);
3233 merge_blocks (combo_bb, bb);
3234 num_true_changes++;
3236 while (bb != last_test_bb);
3239 /* Merge TEST block into THEN block. Normally the THEN block won't have a
3240 label, but it might if there were || tests. That label's count should be
3241 zero, and it normally should be removed. */
3243 if (then_bb)
3245 /* If THEN_BB has no successors, then there's a BARRIER after it.
3246 If COMBO_BB has more than one successor (THEN_BB), then that BARRIER
3247 is no longer needed, and in fact it is incorrect to leave it in
3248 the insn stream. */
3249 if (EDGE_COUNT (then_bb->succs) == 0
3250 && EDGE_COUNT (combo_bb->succs) > 1)
3252 rtx_insn *end = NEXT_INSN (BB_END (then_bb));
3253 while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
3254 end = NEXT_INSN (end);
3256 if (end && BARRIER_P (end))
3257 delete_insn (end);
3259 merge_blocks (combo_bb, then_bb);
3260 num_true_changes++;
3263 /* The ELSE block, if it existed, had a label. That label count
3264 will almost always be zero, but odd things can happen when labels
3265 get their addresses taken. */
3266 if (else_bb)
3268 /* If ELSE_BB has no successors, then there's a BARRIER after it.
3269 If COMBO_BB has more than one successor (ELSE_BB), then that BARRIER
3270 is no longer needed, and in fact it is incorrect to leave it in
3271 the insn stream. */
3272 if (EDGE_COUNT (else_bb->succs) == 0
3273 && EDGE_COUNT (combo_bb->succs) > 1)
3275 rtx_insn *end = NEXT_INSN (BB_END (else_bb));
3276 while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
3277 end = NEXT_INSN (end);
3279 if (end && BARRIER_P (end))
3280 delete_insn (end);
3282 merge_blocks (combo_bb, else_bb);
3283 num_true_changes++;
3286 /* If there was no join block reported, that means it was not adjacent
3287 to the others, and so we cannot merge them. */
3289 if (! join_bb)
3291 rtx_insn *last = BB_END (combo_bb);
3293 /* The outgoing edge for the current COMBO block should already
3294 be correct. Verify this. */
3295 if (EDGE_COUNT (combo_bb->succs) == 0)
3296 gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
3297 || (NONJUMP_INSN_P (last)
3298 && GET_CODE (PATTERN (last)) == TRAP_IF
3299 && (TRAP_CONDITION (PATTERN (last))
3300 == const_true_rtx)));
3302 else
3303 /* There should still be something at the end of the THEN or ELSE
3304 blocks taking us to our final destination. */
3305 gcc_assert (JUMP_P (last)
3306 || (EDGE_SUCC (combo_bb, 0)->dest
3307 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3308 && CALL_P (last)
3309 && SIBLING_CALL_P (last))
3310 || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
3311 && can_throw_internal (last)));
3314 /* The JOIN block may have had quite a number of other predecessors too.
3315 Since we've already merged the TEST, THEN and ELSE blocks, we should
3316 have only one remaining edge from our if-then-else diamond. If there
3317 is more than one remaining edge, it must come from elsewhere. There
3318 may be zero incoming edges if the THEN block didn't actually join
3319 back up (as with a call to a non-return function). */
3320 else if (EDGE_COUNT (join_bb->preds) < 2
3321 && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3323 /* We can merge the JOIN cleanly, update the dataflow, and try
3324 again on this pass. */
3325 merge_blocks (combo_bb, join_bb);
3326 num_true_changes++;
3328 else
3330 /* We cannot merge the JOIN. */
3332 /* The outgoing edge for the current COMBO block should already
3333 be correct. Verify this. */
3334 gcc_assert (single_succ_p (combo_bb)
3335 && single_succ (combo_bb) == join_bb);
3337 /* Remove the jump and cruft from the end of the COMBO block. */
3338 if (join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3339 tidy_fallthru_edge (single_succ_edge (combo_bb));
3342 num_updated_if_blocks++;
3345 /* Find a block ending in a simple IF condition and try to transform it
3346 in some way. When converting a multi-block condition, put the new code
3347 in the first such block and delete the rest. Return a pointer to this
3348 first block if some transformation was done. Return NULL otherwise. */
3350 static basic_block
3351 find_if_header (basic_block test_bb, int pass)
3353 ce_if_block ce_info;
3354 edge then_edge;
3355 edge else_edge;
3357 /* The kind of block we're looking for has exactly two successors. */
3358 if (EDGE_COUNT (test_bb->succs) != 2)
3359 return NULL;
3361 then_edge = EDGE_SUCC (test_bb, 0);
3362 else_edge = EDGE_SUCC (test_bb, 1);
3364 if (df_get_bb_dirty (then_edge->dest))
3365 return NULL;
3366 if (df_get_bb_dirty (else_edge->dest))
3367 return NULL;
3369 /* Neither edge should be abnormal. */
3370 if ((then_edge->flags & EDGE_COMPLEX)
3371 || (else_edge->flags & EDGE_COMPLEX))
3372 return NULL;
3374 /* Nor exit the loop. */
3375 if ((then_edge->flags & EDGE_LOOP_EXIT)
3376 || (else_edge->flags & EDGE_LOOP_EXIT))
3377 return NULL;
3379 /* The THEN edge is canonically the one that falls through. */
3380 if (then_edge->flags & EDGE_FALLTHRU)
3381 ;
3382 else if (else_edge->flags & EDGE_FALLTHRU)
3383 std::swap (then_edge, else_edge);
3384 else
3385 /* Otherwise this must be a multiway branch of some sort. */
3386 return NULL;
3388 memset (&ce_info, 0, sizeof (ce_info));
3389 ce_info.test_bb = test_bb;
3390 ce_info.then_bb = then_edge->dest;
3391 ce_info.else_bb = else_edge->dest;
3392 ce_info.pass = pass;
3394 #ifdef IFCVT_MACHDEP_INIT
3395 IFCVT_MACHDEP_INIT (&ce_info);
3396 #endif
3398 if (!reload_completed
3399 && noce_find_if_block (test_bb, then_edge, else_edge, pass))
3400 goto success;
3402 if (reload_completed
3403 && targetm.have_conditional_execution ()
3404 && cond_exec_find_if_block (&ce_info))
3405 goto success;
3407 if (targetm.have_trap ()
3408 && optab_handler (ctrap_optab, word_mode) != CODE_FOR_nothing
3409 && find_cond_trap (test_bb, then_edge, else_edge))
3410 goto success;
3412 if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
3413 && (reload_completed || !targetm.have_conditional_execution ()))
3415 if (find_if_case_1 (test_bb, then_edge, else_edge))
3416 goto success;
3417 if (find_if_case_2 (test_bb, then_edge, else_edge))
3418 goto success;
3421 return NULL;
3423 success:
3424 if (dump_file)
3425 fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
3426 /* Set this so we continue looking. */
3427 cond_exec_changed_p = TRUE;
3428 return ce_info.test_bb;
3431 /* Determine whether a block has two edges, one of which falls through to the
3432 next block and the other of which jumps to a specific block, so that we can
3433 tell if the block is part of an && test or an || test. Return -1 if the
3434 block is not suitable, otherwise the number of non-note, non-jump, non-USE/CLOBBER insns in it. */
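/* E.g. for "if (a && b)", the block testing A falls through to the block
   testing B and jumps to the ELSE target when A is false; that is the
   shape detected here. */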
3436 static int
3437 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
3439 edge cur_edge;
3440 int fallthru_p = FALSE;
3441 int jump_p = FALSE;
3442 rtx_insn *insn;
3443 rtx_insn *end;
3444 int n_insns = 0;
3445 edge_iterator ei;
3447 if (!cur_bb || !target_bb)
3448 return -1;
3450 /* If no edges, obviously it doesn't jump or fallthru. */
3451 if (EDGE_COUNT (cur_bb->succs) == 0)
3452 return FALSE;
3454 FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
3456 if (cur_edge->flags & EDGE_COMPLEX)
3457 /* Anything complex isn't what we want. */
3458 return -1;
3460 else if (cur_edge->flags & EDGE_FALLTHRU)
3461 fallthru_p = TRUE;
3463 else if (cur_edge->dest == target_bb)
3464 jump_p = TRUE;
3466 else
3467 return -1;
3470 if ((jump_p & fallthru_p) == 0)
3471 return -1;
3473 /* Don't allow calls in the block, since this is used to group && and ||
3474 together for conditional execution support. ??? We should support
3475 conditional execution across calls for IA-64 some day, but
3476 for now it makes the code simpler. */
3477 end = BB_END (cur_bb);
3478 insn = BB_HEAD (cur_bb);
3480 while (insn != NULL_RTX)
3482 if (CALL_P (insn))
3483 return -1;
3485 if (INSN_P (insn)
3486 && !JUMP_P (insn)
3487 && !DEBUG_INSN_P (insn)
3488 && GET_CODE (PATTERN (insn)) != USE
3489 && GET_CODE (PATTERN (insn)) != CLOBBER)
3490 n_insns++;
3492 if (insn == end)
3493 break;
3495 insn = NEXT_INSN (insn);
3498 return n_insns;
3501 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
3502 block. If so, we'll try to convert the insns to not require the branch.
3503 Return TRUE if we were successful at converting the block. */
3505 static int
3506 cond_exec_find_if_block (struct ce_if_block * ce_info)
3508 basic_block test_bb = ce_info->test_bb;
3509 basic_block then_bb = ce_info->then_bb;
3510 basic_block else_bb = ce_info->else_bb;
3511 basic_block join_bb = NULL_BLOCK;
3512 edge cur_edge;
3513 basic_block next;
3514 edge_iterator ei;
3516 ce_info->last_test_bb = test_bb;
3518 /* We only ever should get here after reload,
3519 and if we have conditional execution. */
3520 gcc_assert (reload_completed && targetm.have_conditional_execution ());
3522 /* Discover if any fall through predecessors of the current test basic block
3523 were && tests (which jump to the else block) or || tests (which jump to
3524 the then block). */
3525 if (single_pred_p (test_bb)
3526 && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
3528 basic_block bb = single_pred (test_bb);
3529 basic_block target_bb;
3530 int max_insns = MAX_CONDITIONAL_EXECUTE;
3531 int n_insns;
3533 /* Determine if the preceding block is an && or || block. */
3534 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
3536 ce_info->and_and_p = TRUE;
3537 target_bb = else_bb;
3539 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
3541 ce_info->and_and_p = FALSE;
3542 target_bb = then_bb;
3544 else
3545 target_bb = NULL_BLOCK;
3547 if (target_bb && n_insns <= max_insns)
3549 int total_insns = 0;
3550 int blocks = 0;
3552 ce_info->last_test_bb = test_bb;
3554 /* Found at least one && or || block, look for more. */
3555 do
3557 ce_info->test_bb = test_bb = bb;
3558 total_insns += n_insns;
3559 blocks++;
3561 if (!single_pred_p (bb))
3562 break;
3564 bb = single_pred (bb);
3565 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
3567 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
3569 ce_info->num_multiple_test_blocks = blocks;
3570 ce_info->num_multiple_test_insns = total_insns;
3572 if (ce_info->and_and_p)
3573 ce_info->num_and_and_blocks = blocks;
3574 else
3575 ce_info->num_or_or_blocks = blocks;
3579 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
3580 other than any || blocks which jump to the THEN block. */
3581 if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
3582 return FALSE;
3584 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
3585 FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
3587 if (cur_edge->flags & EDGE_COMPLEX)
3588 return FALSE;
3591 FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
3593 if (cur_edge->flags & EDGE_COMPLEX)
3594 return FALSE;
3597 /* The THEN block of an IF-THEN combo must have zero or one successors. */
3598 if (EDGE_COUNT (then_bb->succs) > 0
3599 && (!single_succ_p (then_bb)
3600 || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
3601 || (epilogue_completed
3602 && tablejump_p (BB_END (then_bb), NULL, NULL))))
3603 return FALSE;
3605 /* If the THEN block has no successors, conditional execution can still
3606 make a conditional call. Don't do this unless the ELSE block has
3607 only one incoming edge -- the CFG manipulation is too ugly otherwise.
3608 Check for the last insn of the THEN block being an indirect jump, which
3609 is listed as not having any successors, but confuses the rest of the CE
3610 code processing. ??? We should fix this in the future. */
3611 if (EDGE_COUNT (then_bb->succs) == 0)
3613 if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3615 rtx_insn *last_insn = BB_END (then_bb);
3617 while (last_insn
3618 && NOTE_P (last_insn)
3619 && last_insn != BB_HEAD (then_bb))
3620 last_insn = PREV_INSN (last_insn);
3622 if (last_insn
3623 && JUMP_P (last_insn)
3624 && ! simplejump_p (last_insn))
3625 return FALSE;
3627 join_bb = else_bb;
3628 else_bb = NULL_BLOCK;
3630 else
3631 return FALSE;
3634 /* If the THEN block's successor is the other edge out of the TEST block,
3635 then we have an IF-THEN combo without an ELSE. */
3636 else if (single_succ (then_bb) == else_bb)
3638 join_bb = else_bb;
3639 else_bb = NULL_BLOCK;
3642 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
3643 has exactly one predecessor and one successor, and the outgoing edge
3644 is not complex, then we have an IF-THEN-ELSE combo. */
3645 else if (single_succ_p (else_bb)
3646 && single_succ (then_bb) == single_succ (else_bb)
3647 && single_pred_p (else_bb)
3648 && !(single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
3649 && !(epilogue_completed
3650 && tablejump_p (BB_END (else_bb), NULL, NULL)))
3651 join_bb = single_succ (else_bb);
3653 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
3654 else
3655 return FALSE;
3657 num_possible_if_blocks++;
3659 if (dump_file)
3661 fprintf (dump_file,
3662 "\nIF-THEN%s block found, pass %d, start block %d "
3663 "[insn %d], then %d [%d]",
3664 (else_bb) ? "-ELSE" : "",
3665 ce_info->pass,
3666 test_bb->index,
3667 BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
3668 then_bb->index,
3669 BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
3671 if (else_bb)
3672 fprintf (dump_file, ", else %d [%d]",
3673 else_bb->index,
3674 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
3676 fprintf (dump_file, ", join %d [%d]",
3677 join_bb->index,
3678 BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
3680 if (ce_info->num_multiple_test_blocks > 0)
3681 fprintf (dump_file, ", %d %s block%s last test %d [%d]",
3682 ce_info->num_multiple_test_blocks,
3683 (ce_info->and_and_p) ? "&&" : "||",
3684 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
3685 ce_info->last_test_bb->index,
3686 ((BB_HEAD (ce_info->last_test_bb))
3687 ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
3688 : -1));
3690 fputc ('\n', dump_file);
3693 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we get the
3694 first condition for free, since we've already asserted that there's a
3695 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
3696 we checked the FALLTHRU flag, those are already adjacent to the last IF
3697 block. */
3698 /* ??? As an enhancement, move the ELSE block. Have to deal with
3699 BLOCK notes, if by no other means than backing out the merge if they
3700 exist. Sticky enough I don't want to think about it now. */
3701 next = then_bb;
3702 if (else_bb && (next = next->next_bb) != else_bb)
3703 return FALSE;
3704 if ((next = next->next_bb) != join_bb
3705 && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3707 if (else_bb)
3708 join_bb = NULL;
3709 else
3710 return FALSE;
3713 /* Do the real work. */
3715 ce_info->else_bb = else_bb;
3716 ce_info->join_bb = join_bb;
3718 /* If we have && and || tests, try to first handle combining the && and ||
3719 tests into the conditional code, and if that fails, go back and handle
3720 it without the && and ||, which at present handles the && case if there
3721 was no ELSE block. */
3722 if (cond_exec_process_if_block (ce_info, TRUE))
3723 return TRUE;
3725 if (ce_info->num_multiple_test_blocks)
3727 cancel_changes (0);
3729 if (cond_exec_process_if_block (ce_info, FALSE))
3730 return TRUE;
3733 return FALSE;
3736 /* Convert a branch over a trap, or a branch
3737 to a trap, into a conditional trap. */
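/* E.g. a test branching around "__builtin_trap ()" becomes a single
   conditional trap insn on targets providing one, and the now-unreachable
   trap block is deleted. */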
3739 static int
3740 find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
3742 basic_block then_bb = then_edge->dest;
3743 basic_block else_bb = else_edge->dest;
3744 basic_block other_bb, trap_bb;
3745 rtx_insn *trap, *jump;
3746 rtx cond;
3747 rtx_insn *cond_earliest;
3748 enum rtx_code code;
3750 /* Locate the block with the trap instruction. */
3751 /* ??? While we look for no successors, we really ought to allow
3752 EH successors. Need to fix merge_if_block for that to work. */
3753 if ((trap = block_has_only_trap (then_bb)) != NULL)
3754 trap_bb = then_bb, other_bb = else_bb;
3755 else if ((trap = block_has_only_trap (else_bb)) != NULL)
3756 trap_bb = else_bb, other_bb = then_bb;
3757 else
3758 return FALSE;
3760 if (dump_file)
3762 fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
3763 test_bb->index, trap_bb->index);
3766 /* If this is not a standard conditional jump, we can't parse it. */
3767 jump = BB_END (test_bb);
3768 cond = noce_get_condition (jump, &cond_earliest, false);
3769 if (! cond)
3770 return FALSE;
3772 /* If the conditional jump is more than just a conditional jump, then
3773 we cannot do if-conversion on this block. */
3774 if (! onlyjump_p (jump))
3775 return FALSE;
3777 /* We must be comparing objects whose modes imply the size. */
3778 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
3779 return FALSE;
3781 /* Reverse the comparison code, if necessary. */
3782 code = GET_CODE (cond);
3783 if (then_bb == trap_bb)
3785 code = reversed_comparison_code (cond, jump);
3786 if (code == UNKNOWN)
3787 return FALSE;
3790 /* Attempt to generate the conditional trap. */
3791 rtx_insn *seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
3792 copy_rtx (XEXP (cond, 1)),
3793 TRAP_CODE (PATTERN (trap)));
3794 if (seq == NULL)
3795 return FALSE;
3797 /* Emit the new insns before cond_earliest. */
3798 emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATION (trap));
3800 /* Delete the trap block if possible. */
3801 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
3802 df_set_bb_dirty (test_bb);
3803 df_set_bb_dirty (then_bb);
3804 df_set_bb_dirty (else_bb);
3806 if (EDGE_COUNT (trap_bb->preds) == 0)
3808 delete_basic_block (trap_bb);
3809 num_true_changes++;
3812 /* Wire together the blocks again. */
3813 if (current_ir_type () == IR_RTL_CFGLAYOUT)
3814 single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
3815 else if (trap_bb == then_bb)
3817 rtx lab = JUMP_LABEL (jump);
3818 rtx_insn *seq = targetm.gen_jump (lab);
3819 rtx_jump_insn *newjump = emit_jump_insn_after (seq, jump);
3820 LABEL_NUSES (lab) += 1;
3821 JUMP_LABEL (newjump) = lab;
3822 emit_barrier_after (newjump);
3824 delete_insn (jump);
3826 if (can_merge_blocks_p (test_bb, other_bb))
3828 merge_blocks (test_bb, other_bb);
3829 num_true_changes++;
3832 num_updated_if_blocks++;
3833 return TRUE;
3836 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
3837 return it. */
3839 static rtx_insn *
3840 block_has_only_trap (basic_block bb)
3842 rtx_insn *trap;
3844 /* We're not the exit block. */
3845 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
3846 return NULL;
3848 /* The block must have no successors. */
3849 if (EDGE_COUNT (bb->succs) > 0)
3850 return NULL;
3852 /* The only instruction in the THEN block must be the trap. */
3853 trap = first_active_insn (bb);
3854 if (! (trap == BB_END (bb)
3855 && GET_CODE (PATTERN (trap)) == TRAP_IF
3856 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
3857 return NULL;
3859 return trap;
3862 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
3863 transformable, but not necessarily the other. There need be no
3864 JOIN block.
3866 Return TRUE if we were successful at converting the block.
3868 Cases we'd like to look at:
3870 (1)
3871 if (test) goto over; // x not live
3872 x = a;
3873 goto label;
3874 over:
3876 becomes
3878 x = a;
3879 if (! test) goto label;
3881 (2)
3882 if (test) goto E; // x not live
3883 x = big();
3884 goto L;
3885 E:
3886 x = b;
3887 goto M;
3889 becomes
3891 x = b;
3892 if (test) goto M;
3893 x = big();
3894 goto L;
3896 (3) // This one's really only interesting for targets that can do
3897 // multiway branching, e.g. IA-64 BBB bundles. For other targets
3898 // it results in multiple branches on a cache line, which often
3899 // does not sit well with predictors.
3901 if (test1) goto E; // predicted not taken
3902 x = a;
3903 if (test2) goto F;
3904 ...
3905 E:
3906 x = b;
3907 J:
3909 becomes
3911 x = a;
3912 if (test1) goto E;
3913 if (test2) goto F;
3915 Notes:
3917 (A) Don't do (2) if the branch is predicted against the block we're
3918 eliminating. Do it anyway if we can eliminate a branch; this requires
3919 that the sole successor of the eliminated block postdominate the other
3920 side of the if.
3922 (B) With CE, on (3) we can steal from both sides of the if, creating
3924 if (test1) x = a;
3925 if (!test1) x = b;
3926 if (test1) goto J;
3927 if (test2) goto F;
3928 ...
3929 J:
3931 Again, this is most useful if J postdominates.
3933 (C) CE substitutes for helpful life information.
3935 (D) These heuristics need a lot of work. */
/* Tests for case 1 above.  */

static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block new_bb;
  int then_bb_index, then_prob;
  rtx else_target = NULL_RTX;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && JUMP_P (BB_END (then_bb))
       && CROSSING_JUMP_P (BB_END (then_bb)))
      || (BB_END (test_bb)
	  && JUMP_P (BB_END (test_bb))
	  && CROSSING_JUMP_P (BB_END (test_bb)))
      || (BB_END (else_bb)
	  && JUMP_P (BB_END (else_bb))
	  && CROSSING_JUMP_P (BB_END (else_bb))))
    return FALSE;

  /* THEN has one successor.  */
  if (!single_succ_p (then_bb))
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (!single_pred_p (then_bb))
    return FALSE;

  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-1 found, start %d, then %d\n",
	     test_bb->index, then_bb->index);

  if (then_edge->probability)
    then_prob = REG_BR_PROB_BASE - then_edge->probability;
  else
    then_prob = REG_BR_PROB_BASE / 2;

  /* We're speculating from the THEN path, so we want to make sure the
     cost of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (then_bb, then_prob,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
				    predictable_edge_p (then_edge)))))
    return FALSE;

  if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      rtx_insn *jump = BB_END (else_edge->src);
      gcc_assert (JUMP_P (jump));
      else_target = JUMP_LABEL (jump);
    }

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
			    single_succ_edge (then_bb), 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  /* We can avoid creating a new basic block if then_bb is immediately
     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
     through to else_bb.  */

  if (then_bb->next_bb == else_bb
      && then_bb->prev_bb == test_bb
      && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
      new_bb = 0;
    }
  else if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    new_bb = force_nonfallthru_and_redirect (FALLTHRU_EDGE (test_bb),
					     else_bb, else_target);
  else
    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
					     else_bb);

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (else_bb);

  then_bb_index = then_bb->index;
  delete_basic_block (then_bb);

  /* Make the rest of the code believe that the newly created block is the
     THEN_BB block we removed.  */
  if (new_bb)
    {
      df_bb_replace (then_bb_index, new_bb);
      /* This should have been done above via force_nonfallthru_and_redirect
	 (possibly called from redirect_edge_and_branch_force).  */
      gcc_checking_assert (BB_PARTITION (new_bb) == BB_PARTITION (test_bb));
    }

  num_true_changes++;
  num_updated_if_blocks++;

  return TRUE;
}

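/* A worked example (illustrative numbers, not from the sources) for the
   probability handling in find_if_case_1 above: REG_BR_PROB_BASE is
   10000, so a then_edge->probability of 3000 -- THEN_BB entered 30% of
   the time -- yields then_prob = 10000 - 3000 = 7000, the probability
   that the branch bypasses THEN_BB.  When no probability is recorded we
   assume an even 5000/5000 split.  That weight is then handed to
   cheap_bb_rtx_cost_p to bound how much work we are willing to
   speculate.  */
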
/* Test for case 2 above.  */

static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ;
  int then_prob, else_prob;

  /* We do not want to speculate (empty) loop latches.  */
  if (current_loops
      && else_bb->loop_father->latch == else_bb)
    return FALSE;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && JUMP_P (BB_END (then_bb))
       && CROSSING_JUMP_P (BB_END (then_bb)))
      || (BB_END (test_bb)
	  && JUMP_P (BB_END (test_bb))
	  && CROSSING_JUMP_P (BB_END (test_bb)))
      || (BB_END (else_bb)
	  && JUMP_P (BB_END (else_bb))
	  && CROSSING_JUMP_P (BB_END (else_bb))))
    return FALSE;

  /* ELSE has one successor.  */
  if (!single_succ_p (else_bb))
    return FALSE;
  else
    else_succ = single_succ_edge (else_bb);

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (!single_pred_p (else_bb))
    return FALSE;

  /* THEN is not EXIT.  */
  if (then_bb->index < NUM_FIXED_BLOCKS)
    return FALSE;

  if (else_edge->probability)
    {
      else_prob = else_edge->probability;
      then_prob = REG_BR_PROB_BASE - else_prob;
    }
  else
    {
      else_prob = REG_BR_PROB_BASE / 2;
      then_prob = REG_BR_PROB_BASE / 2;
    }

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
  if (else_prob > then_prob)
    ;
  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
			      else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-2 found, start %d, else %d\n",
	     test_bb->index, else_bb->index);

  /* We're speculating from the ELSE path, so we want to make sure the
     cost of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (else_bb, else_prob,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
				    predictable_edge_p (else_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}

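/* Illustration (hypothetical source, not a testcase) for the loop-latch
   guard at the top of find_if_case_2: in

	do
	  body ();
	while (test ());

   the back edge to the loop header may run through an (empty) latch
   block.  If that latch were taken as ELSE_BB and deleted, the loop
   would lose the dedicated latch that the loop optimizers expect, which
   is presumably why we refuse to speculate it.  */
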
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  DEST_EDGE is an
   edge representing a jump to the join block; after the conversion,
   TEST_BB should be branching to its destination.
   REVERSEP is true if the sense of the branch should be reversed.  */

static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
		    basic_block other_bb, edge dest_edge, int reversep)
{
  basic_block new_dest = dest_edge->dest;
  rtx_insn *head, *end, *jump;
  rtx_insn *earliest = NULL;
  rtx old_dest;
  bitmap merge_set = NULL;
  /* Number of pending changes.  */
  int n_validated_changes = 0;
  rtx new_dest_label = NULL_RTX;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  while (DEBUG_INSN_P (end) && end != head)
    end = PREV_INSN (end);

  /* If merge_bb ends with a tablejump, predicating/moving insns
     into test_bb and then deleting merge_bb will result in the jumptable
     that follows merge_bb being removed along with merge_bb and then we
     get an unresolved reference to the jumptable.  */
  if (tablejump_p (end, NULL, NULL))
    return FALSE;

  if (LABEL_P (head))
    head = NEXT_INSN (head);
  while (DEBUG_INSN_P (head) && head != end)
    head = NEXT_INSN (head);
  if (NOTE_P (head))
    {
      if (head == end)
	{
	  head = end = NULL;
	  goto no_body;
	}
      head = NEXT_INSN (head);
      while (DEBUG_INSN_P (head) && head != end)
	head = NEXT_INSN (head);
    }

  if (JUMP_P (end))
    {
      if (!onlyjump_p (end))
	return FALSE;
      if (head == end)
	{
	  head = end = NULL;
	  goto no_body;
	}
      end = PREV_INSN (end);
      while (DEBUG_INSN_P (end) && end != head)
	end = PREV_INSN (end);
    }

  /* Don't move frame-related insns across the conditional branch.  This
     can lead to one of the paths of the branch having wrong unwind info.  */
  if (epilogue_completed)
    {
      rtx_insn *insn = head;
      while (1)
	{
	  if (INSN_P (insn) && RTX_FRAME_RELATED_P (insn))
	    return FALSE;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (targetm.have_conditional_execution ())
    {
      /* In the conditional execution case, we have things easy.  We know
	 the condition is reversible.  We don't have to check life info
	 because we're going to conditionally execute the code anyway.
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

      rtx cond;

      cond = cond_exec_get_condition (jump);
      if (! cond)
	return FALSE;

      rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      int prob_val = (note ? XINT (note, 0) : -1);

      if (reversep)
	{
	  enum rtx_code rev = reversed_comparison_code (cond, jump);
	  if (rev == UNKNOWN)
	    return FALSE;
	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
				 XEXP (cond, 1));
	  if (prob_val >= 0)
	    prob_val = REG_BR_PROB_BASE - prob_val;
	}

      if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
	  && verify_changes (0))
	n_validated_changes = num_validated_changes ();
      else
	cancel_changes (0);

      earliest = jump;
    }
#endif

  /* If we allocated new pseudos (e.g. in the conditional move
     expander called from noce_emit_cmove), we must resize the
     array first.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Try the NCE path if the CE path did not result in any changes.  */
  if (n_validated_changes == 0)
    {
      rtx cond;
      rtx_insn *insn;
      regset live;
      bool success;

      /* In the non-conditional execution case, we have to verify that there
	 are no trapping operations, no calls, no references to memory, and
	 that any registers modified are dead at the branch site.  */

      if (!any_condjump_p (jump))
	return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest, false);
      if (!cond)
	return FALSE;

      live = BITMAP_ALLOC (&reg_obstack);
      simulate_backwards_to_point (merge_bb, live, end);
      success = can_move_insns_across (head, end, earliest, jump,
				       merge_bb, live,
				       df_get_live_in (other_bb), NULL);
      BITMAP_FREE (live);
      if (!success)
	return FALSE;

      /* Collect the set of registers set in MERGE_BB.  */
      merge_set = BITMAP_ALLOC (&reg_obstack);

      FOR_BB_INSNS (merge_bb, insn)
	if (NONDEBUG_INSN_P (insn))
	  df_simulate_find_defs (insn, merge_set);

      /* If shrink-wrapping, disable this optimization when test_bb is
	 the first basic block and merge_bb exits.  The idea is to not
	 move code setting up a return register as that may clobber a
	 register used to pass function parameters, which then must be
	 saved in caller-saved regs.  A caller-saved reg requires the
	 prologue, killing a shrink-wrap opportunity.  */
      if ((SHRINK_WRAPPING_ENABLED && !epilogue_completed)
	  && ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == test_bb
	  && single_succ_p (new_dest)
	  && single_succ (new_dest) == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && bitmap_intersect_p (df_get_live_in (new_dest), merge_set))
	{
	  regset return_regs;
	  unsigned int i;

	  return_regs = BITMAP_ALLOC (&reg_obstack);

	  /* Start off with the intersection of regs used to pass
	     params and regs used to return values.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (FUNCTION_ARG_REGNO_P (i)
		&& targetm.calls.function_value_regno_p (i))
	      bitmap_set_bit (return_regs, INCOMING_REGNO (i));

	  bitmap_and_into (return_regs,
			   df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  bitmap_and_into (return_regs,
			   df_get_live_in (EXIT_BLOCK_PTR_FOR_FN (cfun)));
	  if (!bitmap_empty_p (return_regs))
	    {
	      FOR_BB_INSNS_REVERSE (new_dest, insn)
		if (NONDEBUG_INSN_P (insn))
		  {
		    df_ref def;

		    /* If this insn sets any reg in return_regs, add all
		       reg uses to the set of regs we're interested in.  */
		    FOR_EACH_INSN_DEF (def, insn)
		      if (bitmap_bit_p (return_regs, DF_REF_REGNO (def)))
			{
			  df_simulate_uses (insn, return_regs);
			  break;
			}
		  }
	      if (bitmap_intersect_p (merge_set, return_regs))
		{
		  BITMAP_FREE (return_regs);
		  BITMAP_FREE (merge_set);
		  return FALSE;
		}
	    }
	  BITMAP_FREE (return_regs);
	}
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      if (!any_condjump_p (jump))
	goto cancel;

      if (JUMP_P (BB_END (dest_edge->src)))
	new_dest_label = JUMP_LABEL (BB_END (dest_edge->src));
      else if (new_dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	new_dest_label = ret_rtx;
      else
	new_dest_label = block_label (new_dest);

      rtx_jump_insn *jump_insn = as_a <rtx_jump_insn *> (jump);
      if (reversep
	  ? ! invert_jump_1 (jump_insn, new_dest_label)
	  : ! redirect_jump_1 (jump_insn, new_dest_label))
	goto cancel;
    }

  if (verify_changes (n_validated_changes))
    confirm_change_group ();
  else
    goto cancel;

  if (other_bb != new_dest)
    {
      redirect_jump_2 (as_a <rtx_jump_insn *> (jump), old_dest, new_dest_label,
		       0, reversep);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
	{
	  std::swap (BRANCH_EDGE (test_bb)->count,
		     FALLTHRU_EDGE (test_bb)->count);
	  std::swap (BRANCH_EDGE (test_bb)->probability,
		     FALLTHRU_EDGE (test_bb)->probability);
	  update_br_prob_note (test_bb);
	}
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      rtx_insn *insn;

      if (end == BB_END (merge_bb))
	BB_END (merge_bb) = PREV_INSN (head);

      /* PR 21767: when moving insns above a conditional branch, the REG_EQUAL
	 notes being moved might become invalid.  */
      insn = head;
      do
	{
	  rtx note;

	  if (! INSN_P (insn))
	    continue;
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (! note)
	    continue;
	  remove_note (insn, note);
	} while (insn != end && (insn = NEXT_INSN (insn)));

      /* PR 46315: when moving insns above a conditional branch, the REG_EQUAL
	 notes referring to the registers being set might become invalid.  */
      if (merge_set)
	{
	  unsigned i;
	  bitmap_iterator bi;

	  EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
	    remove_reg_equal_equiv_notes_for_regno (i);

	  BITMAP_FREE (merge_set);
	}

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
	 At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  cancel_changes (0);

  if (merge_set)
    BITMAP_FREE (merge_set);

  return FALSE;
}

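/* Illustration of the PR 21767 / PR 46315 note removal performed in
   dead_or_predicable (hypothetical RTL, not from a testcase).  Given an
   insn in MERGE_BB such as

	(set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 1)))
	   with a note (expr_list:REG_EQUAL (const_int 5))

   the REG_EQUAL note records a value that held only on the path through
   MERGE_BB.  Once the insn is hoisted above the conditional branch it
   also executes on the other path, where reg 101 need not be 4, so the
   note would assert a false equality and has to be dropped.  */
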
/* Main entry point for all if-conversion.  AFTER_COMBINE is true if
   we are after the combine pass.  */

static void
if_convert (bool after_combine)
{
  basic_block bb;
  int pass;

  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  /* Record whether we are after the combine pass.  */
  ifcvt_after_combine = after_combine;
  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;

  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  mark_loop_exit_edges ();
  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);

  /* Compute postdominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
      df_analyze ();
      /* Only need to do dce on the first pass.  */
      df_clear_flags (DF_LR_RUN_DCE);
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && pass > 1)
	fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB_FN (bb, cfun)
	{
	  basic_block new_bb;
	  while (!df_get_bb_dirty (bb)
		 && (new_bb = find_if_header (bb, pass)) != NULL)
	    bb = new_bb;
	}

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && cond_exec_changed_p)
	print_rtl_with_bb (dump_file, get_insns (), dump_flags);
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  if (dump_file)
    fflush (dump_file);

  clear_aux_for_blocks ();

  /* If we allocated new pseudos, we must resize the array for sched1.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Write the final stats.  */
  if (dump_file && num_possible_if_blocks > 0)
    {
      fprintf (dump_file,
	       "\n%d possible IF blocks searched.\n",
	       num_possible_if_blocks);
      fprintf (dump_file,
	       "%d IF blocks converted.\n",
	       num_updated_if_blocks);
      fprintf (dump_file,
	       "%d true changes made.\n\n\n",
	       num_true_changes);
    }

  if (optimize == 1)
    df_remove_problem (df_live);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

/* If-conversion and CFG cleanup.  */
static unsigned int
rest_of_handle_if_conversion (void)
{
  if (flag_if_conversion)
    {
      if (dump_file)
	{
	  dump_reg_info (dump_file);
	  dump_flow_info (dump_file, dump_flags);
	}
      cleanup_cfg (CLEANUP_EXPENSIVE);
      if_convert (false);
    }

  cleanup_cfg (0);
  return 0;
}

namespace {

const pass_data pass_data_rtl_ifcvt =
{
  RTL_PASS, /* type */
  "ce1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_ifcvt : public rtl_opt_pass
{
public:
  pass_rtl_ifcvt (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_ifcvt, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (optimize > 0) && dbg_cnt (if_conversion);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_if_conversion ();
    }

}; // class pass_rtl_ifcvt

} // anon namespace

rtl_opt_pass *
make_pass_rtl_ifcvt (gcc::context *ctxt)
{
  return new pass_rtl_ifcvt (ctxt);
}

/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  */

namespace {

const pass_data pass_data_if_after_combine =
{
  RTL_PASS, /* type */
  "ce2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_if_after_combine : public rtl_opt_pass
{
public:
  pass_if_after_combine (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_combine, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion
	&& dbg_cnt (if_after_combine);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_combine

} // anon namespace

rtl_opt_pass *
make_pass_if_after_combine (gcc::context *ctxt)
{
  return new pass_if_after_combine (ctxt);
}

namespace {

const pass_data pass_data_if_after_reload =
{
  RTL_PASS, /* type */
  "ce3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_if_after_reload : public rtl_opt_pass
{
public:
  pass_if_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion2
	&& dbg_cnt (if_after_reload);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_if_after_reload (gcc::context *ctxt)
{
  return new pass_if_after_reload (ctxt);
}

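/* The three factory functions above are how the pass manager picks up
   these passes: gcc/passes.def instantiates them by name, roughly
   (paraphrased; see passes.def for the authoritative ordering):

	NEXT_PASS (pass_rtl_ifcvt);          // "ce1", before combine
	NEXT_PASS (pass_if_after_combine);   // "ce2", after combine
	NEXT_PASS (pass_if_after_reload);    // "ce3", after reload
*/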