gcc/ifcvt.c
/* If-conversion support.
   Copyright (C) 2000-2014 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "regs.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "except.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "expr.h"
#include "output.h"
#include "optabs.h"
#include "diagnostic-core.h"
#include "tm_p.h"
#include "cfgloop.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "dbgcnt.h"
#include "shrink-wrap.h"
#include "ifcvt.h"
#ifndef HAVE_conditional_move
#define HAVE_conditional_move 0
#endif
#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif
#ifndef HAVE_trap
#define HAVE_trap 0
#endif

#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
   + 1)
#endif
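/* For example (illustrative only): on a target whose BRANCH_COST is 3
   when optimizing for speed, MAX_CONDITIONAL_EXECUTE evaluates to 4,
   so at most four insns per block are considered for predication.  */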
#define IFCVT_MULTIPLE_DUMPS 1

#define NULL_BLOCK  ((basic_block) NULL)

/* True if after combine pass.  */
static bool ifcvt_after_combine;

/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;

/* Forward references.  */
static int count_bb_insns (const_basic_block);
static bool cheap_bb_rtx_cost_p (const_basic_block, int, int);
static rtx_insn *first_active_insn (basic_block);
static rtx_insn *last_active_insn (basic_block, int);
static rtx_insn *find_active_insn_before (basic_block, rtx_insn *);
static rtx_insn *find_active_insn_after (basic_block, rtx_insn *);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block *, rtx_insn *, rtx, rtx, int,
                                    int);
static rtx cond_exec_get_condition (rtx_insn *);
static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
static int noce_operand_ok (const_rtx);
static void merge_if_block (ce_if_block *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
static int block_jumps_and_fallthru_p (basic_block, basic_block);
static int noce_find_if_block (basic_block, edge, edge, int);
static int cond_exec_find_if_block (ce_if_block *);
static int find_if_case_1 (basic_block, edge, edge);
static int find_if_case_2 (basic_block, edge, edge);
static int dead_or_predicable (basic_block, basic_block, basic_block,
                               edge, int);
static void noce_emit_move_insn (rtx, rtx);
static rtx_insn *block_has_only_trap (basic_block);
/* Count the number of non-jump active insns in BB.  */

static int
count_bb_insns (const_basic_block bb)
{
  int count = 0;
  rtx_insn *insn = BB_HEAD (bb);

  while (1)
    {
      if (active_insn_p (insn) && !JUMP_P (insn))
        count++;

      if (insn == BB_END (bb))
        break;
      insn = NEXT_INSN (insn);
    }

  return count;
}
/* Determine whether the total insn_rtx_cost on non-jump insns in
   basic block BB is less than MAX_COST.  This function returns
   false if the cost of any instruction could not be estimated.

   The cost of the non-jump insns in BB is scaled by REG_BR_PROB_BASE
   as those insns are being speculated.  MAX_COST is scaled with SCALE
   plus a small fudge factor.  */

static bool
cheap_bb_rtx_cost_p (const_basic_block bb, int scale, int max_cost)
{
  int count = 0;
  rtx_insn *insn = BB_HEAD (bb);
  bool speed = optimize_bb_for_speed_p (bb);

  /* Set scale to REG_BR_PROB_BASE to cancel the identical scaling
     applied to insn_rtx_cost when optimizing for size.  Only do
     this after combine because if-conversion might interfere with
     passes before combine.

     Use optimize_function_for_speed_p instead of the pre-defined
     variable speed to make sure it is set to the same value for all
     basic blocks in one if-conversion transformation.  */
  if (!optimize_function_for_speed_p (cfun) && ifcvt_after_combine)
    scale = REG_BR_PROB_BASE;
  /* Our branch probability/scaling factors are just estimates and don't
     account for cases where we can get speculation for free and other
     secondary benefits.  So we fudge the scale factor to make speculating
     appear a little more profitable when optimizing for performance.  */
  else
    scale += REG_BR_PROB_BASE / 8;

  max_cost *= scale;
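  /* A numeric sketch (illustrative only, assuming REG_BR_PROB_BASE
     is 10000): if SCALE arrives as 5000 (a 50% branch probability)
     and we are optimizing for speed, the fudge above makes scale
     6250, so a MAX_COST of 2 becomes 12500 in the same scaled units
     as the per-insn costs below, each of which is multiplied by
     REG_BR_PROB_BASE.  */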
  while (1)
    {
      if (NONJUMP_INSN_P (insn))
        {
          int cost = insn_rtx_cost (PATTERN (insn), speed) * REG_BR_PROB_BASE;
          if (cost == 0)
            return false;

          /* If this instruction is the load or set of a "stack" register,
             such as a floating point register on x87, then the cost of
             speculatively executing this insn may need to include
             the additional cost of popping its result off of the
             register stack.  Unfortunately, correctly recognizing and
             accounting for this additional overhead is tricky, so for
             now we simply prohibit such speculative execution.  */
#ifdef STACK_REGS
          {
            rtx set = single_set (insn);
            if (set && STACK_REG_P (SET_DEST (set)))
              return false;
          }
#endif

          count += cost;
          if (count >= max_cost)
            return false;
        }
      else if (CALL_P (insn))
        return false;

      if (insn == BB_END (bb))
        break;
      insn = NEXT_INSN (insn);
    }

  return true;
}
/* Return the first non-jump active insn in the basic block.  */

static rtx_insn *
first_active_insn (basic_block bb)
{
  rtx_insn *insn = BB_HEAD (bb);

  if (LABEL_P (insn))
    {
      if (insn == BB_END (bb))
        return NULL;
      insn = NEXT_INSN (insn);
    }

  while (NOTE_P (insn) || DEBUG_INSN_P (insn))
    {
      if (insn == BB_END (bb))
        return NULL;
      insn = NEXT_INSN (insn);
    }

  if (JUMP_P (insn))
    return NULL;

  return insn;
}
/* Return the last active non-jump insn in the basic block.  */

static rtx_insn *
last_active_insn (basic_block bb, int skip_use_p)
{
  rtx_insn *insn = BB_END (bb);
  rtx_insn *head = BB_HEAD (bb);

  while (NOTE_P (insn)
         || JUMP_P (insn)
         || DEBUG_INSN_P (insn)
         || (skip_use_p
             && NONJUMP_INSN_P (insn)
             && GET_CODE (PATTERN (insn)) == USE))
    {
      if (insn == head)
        return NULL;
      insn = PREV_INSN (insn);
    }

  if (LABEL_P (insn))
    return NULL;

  return insn;
}
/* Return the active insn before INSN inside basic block CURR_BB.  */

static rtx_insn *
find_active_insn_before (basic_block curr_bb, rtx_insn *insn)
{
  if (!insn || insn == BB_HEAD (curr_bb))
    return NULL;

  while ((insn = PREV_INSN (insn)) != NULL_RTX)
    {
      if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
        break;

      /* No other active insn all the way to the start of the basic block.  */
      if (insn == BB_HEAD (curr_bb))
        return NULL;
    }

  return insn;
}

/* Return the active insn after INSN inside basic block CURR_BB.  */

static rtx_insn *
find_active_insn_after (basic_block curr_bb, rtx_insn *insn)
{
  if (!insn || insn == BB_END (curr_bb))
    return NULL;

  while ((insn = NEXT_INSN (insn)) != NULL_RTX)
    {
      if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
        break;

      /* No other active insn all the way to the end of the basic block.  */
      if (insn == BB_END (curr_bb))
        return NULL;
    }

  return insn;
}
/* Return the basic block reached by falling through the basic block BB.  */

static basic_block
block_fallthru (basic_block bb)
{
  edge e = find_fallthru_edge (bb->succs);

  return (e) ? e->dest : NULL_BLOCK;
}
/* Return true if RTXs A and B can be safely interchanged.  */

static bool
rtx_interchangeable_p (const_rtx a, const_rtx b)
{
  if (!rtx_equal_p (a, b))
    return false;

  if (GET_CODE (a) != MEM)
    return true;

  /* A dead type-unsafe memory reference is legal, but a live type-unsafe
     memory reference is not.  Interchanging a dead type-unsafe memory
     reference with a live type-safe one creates a live type-unsafe memory
     reference, in other words, it makes the program illegal.
     We check here conservatively whether the two memory references have
     equal memory attributes.  */

  return mem_attrs_eq_p (get_mem_attrs (a), get_mem_attrs (b));
}
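/* Illustrative note (not part of the original sources): two MEMs can
   be rtx_equal_p and yet carry different attributes, e.g. different
   alias sets when the same address is accessed once as an int and
   once through a union member.  Treating one access as a stand-in
   for the other could then let alias analysis draw conclusions that
   were only valid for the access that was removed, hence the
   conservative mem_attrs_eq_p check above.  */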
/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.  */

static int
cond_exec_process_insns (ce_if_block *ce_info ATTRIBUTE_UNUSED,
                         /* if block information */rtx_insn *start,
                         /* first insn to look at */rtx end,
                         /* last insn to look at */rtx test,
                         /* conditional execution test */int prob_val,
                         /* probability of branch taken. */int mod_ok)
{
  int must_be_last = FALSE;
  rtx_insn *insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* dwarf2out can't cope with conditional prologues.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
        return FALSE;

      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
        goto insn_done;

      gcc_assert (NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* dwarf2out can't cope with conditional unwind info.  */
      if (RTX_FRAME_RELATED_P (insn))
        return FALSE;

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
        {
          /* ??? Ug.  Actually unlinking the thing is problematic,
             given what we'd have to coordinate with our callers.  */
          SET_INSN_DELETED (insn);
          goto insn_done;
        }

      /* Last insn wasn't last?  */
      if (must_be_last)
        return FALSE;

      if (modified_in_p (test, insn))
        {
          if (!mod_ok)
            return FALSE;
          must_be_last = TRUE;
        }

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
        {
          if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
            return FALSE;

          xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
                               COND_EXEC_TEST (pattern));
          pattern = COND_EXEC_CODE (pattern);
        }

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
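      /* The result has the documented COND_EXEC shape, for example
         (cond_exec (ne (reg:CC cc) (const_int 0))
                    (set (reg:SI r0) (reg:SI r1)))
         where the register names are illustrative only.  */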
      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
        return FALSE;
#endif

      validate_change (insn, &PATTERN (insn), pattern, 1);

      if (CALL_P (insn) && prob_val >= 0)
        validate_change (insn, &REG_NOTES (insn),
                         gen_rtx_INT_LIST ((machine_mode) REG_BR_PROB,
                                           prob_val, REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
        break;
    }

  return TRUE;
}
/* Return the condition for a jump.  Do not do any special processing.  */

static rtx
cond_exec_get_condition (rtx_insn *jump)
{
  rtx test_if, cond;

  if (any_condjump_p (jump))
    test_if = SET_SRC (pc_set (jump));
  else
    return NULL_RTX;
  cond = XEXP (test_if, 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
      && LABEL_REF_LABEL (XEXP (test_if, 2)) == JUMP_LABEL (jump))
    {
      enum rtx_code rev = reversed_comparison_code (cond, jump);
      if (rev == UNKNOWN)
        return NULL_RTX;

      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
                             XEXP (cond, 1));
    }

  return cond;
}
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_exec_process_if_block (ce_if_block * ce_info,
                            /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;  /* last test block */
  basic_block then_bb = ce_info->then_bb;  /* THEN */
  basic_block else_bb = ce_info->else_bb;  /* ELSE or NULL */
  rtx test_expr;                /* expression in IF_THEN_ELSE that is tested */
  rtx_insn *then_start;         /* first insn in THEN block */
  rtx_insn *then_end;           /* last insn + 1 in THEN block */
  rtx_insn *else_start = NULL;  /* first insn in ELSE block or NULL */
  rtx_insn *else_end = NULL;    /* last insn + 1 in ELSE block */
  int max;                      /* max # of insns to convert.  */
  int then_mod_ok;              /* whether conditional mods are ok in THEN */
  rtx true_expr;                /* test for else block insns */
  rtx false_expr;               /* test for then block insns */
  int true_prob_val;            /* probability of else block */
  int false_prob_val;           /* probability of then block */
  rtx_insn *then_last_head = NULL;      /* Last match at the head of THEN */
  rtx_insn *else_last_head = NULL;      /* Last match at the head of ELSE */
  rtx_insn *then_first_tail = NULL;     /* First match at the tail of THEN */
  rtx_insn *else_first_tail = NULL;     /* First match at the tail of ELSE */
  int then_n_insns, else_n_insns, n_insns;
  enum rtx_code false_code;
  rtx note;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
        return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }
  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we cannot do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  then_n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  n_insns = then_n_insns;
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      int n_matching;

      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      else_n_insns = ce_info->num_else_insns = count_bb_insns (else_bb);
      n_insns += else_n_insns;

      /* Look for matching sequences at the head and tail of the two blocks,
         and limit the range of insns to be converted if possible.  */
      n_matching = flow_find_cross_jump (then_bb, else_bb,
                                         &then_first_tail, &else_first_tail,
                                         NULL);
      if (then_first_tail == BB_HEAD (then_bb))
        then_start = then_end = NULL;
      if (else_first_tail == BB_HEAD (else_bb))
        else_start = else_end = NULL;

      if (n_matching > 0)
        {
          if (then_end)
            then_end = find_active_insn_before (then_bb, then_first_tail);
          if (else_end)
            else_end = find_active_insn_before (else_bb, else_first_tail);
          n_insns -= 2 * n_matching;
        }

      if (then_start
          && else_start
          && then_n_insns > n_matching
          && else_n_insns > n_matching)
        {
          int longest_match = MIN (then_n_insns - n_matching,
                                   else_n_insns - n_matching);
          n_matching
            = flow_find_head_matching_sequence (then_bb, else_bb,
                                                &then_last_head,
                                                &else_last_head,
                                                longest_match);

          if (n_matching > 0)
            {
              rtx_insn *insn;

              /* We won't pass the insns in the head sequence to
                 cond_exec_process_insns, so we need to test them here
                 to make sure that they don't clobber the condition.  */
              for (insn = BB_HEAD (then_bb);
                   insn != NEXT_INSN (then_last_head);
                   insn = NEXT_INSN (insn))
                if (!LABEL_P (insn) && !NOTE_P (insn)
                    && !DEBUG_INSN_P (insn)
                    && modified_in_p (test_expr, insn))
                  return FALSE;
            }

          if (then_last_head == then_end)
            then_start = then_end = NULL;
          if (else_last_head == else_end)
            else_start = else_end = NULL;

          if (n_matching > 0)
            {
              if (then_start)
                then_start = find_active_insn_after (then_bb, then_last_head);
              if (else_start)
                else_start = find_active_insn_after (else_bb, else_last_head);
              n_insns -= 2 * n_matching;
            }
        }
    }

  if (n_insns > max)
    return FALSE;
  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
                                 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note)
    {
      true_prob_val = XINT (note, 0);
      false_prob_val = REG_BR_PROB_BASE - true_prob_val;
    }
  else
    {
      true_prob_val = -1;
      false_prob_val = -1;
    }

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
        goto fail;

      do
        {
          rtx_insn *start, *end;
          rtx t, f;
          enum rtx_code f_code;

          bb = block_fallthru (bb);
          start = first_active_insn (bb);
          end = last_active_insn (bb, TRUE);
          if (start
              && ! cond_exec_process_insns (ce_info, start, end, false_expr,
                                            false_prob_val, FALSE))
            goto fail;

          /* If the conditional jump is more than just a conditional jump, then
             we cannot do conditional execution conversion on this block.  */
          if (! onlyjump_p (BB_END (bb)))
            goto fail;

          /* Find the conditional jump and isolate the test.  */
          t = cond_exec_get_condition (BB_END (bb));
          if (! t)
            goto fail;

          f_code = reversed_comparison_code (t, BB_END (bb));
          if (f_code == UNKNOWN)
            goto fail;

          f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
          if (ce_info->and_and_p)
            {
              t = gen_rtx_AND (GET_MODE (t), true_expr, t);
              f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
            }
          else
            {
              t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
              f = gen_rtx_AND (GET_MODE (t), false_expr, f);
            }

          /* If the machine description needs to modify the tests, such as
             setting a conditional execution register from a comparison, it can
             do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
          IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

          /* See if the conversion failed.  */
          if (!t || !f)
            goto fail;
#endif

          true_expr = t;
          false_expr = f;
        }
      while (bb != last_test_bb);
    }
  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
          || ! cond_exec_process_insns (ce_info, then_start, then_end,
                                        false_expr, false_prob_val,
                                        then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
                                    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
             n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  If we had matching sequences, make sure to delete one
     copy at the appropriate location first: delete the copy in the THEN branch
     for a tail sequence so that the remaining one is executed last for both
     branches, and delete the copy in the ELSE branch for a head sequence so
     that the remaining one is executed first for both branches.  */
  if (then_first_tail)
    {
      rtx_insn *from = then_first_tail;
      if (!INSN_P (from))
        from = find_active_insn_after (then_bb, from);
      delete_insn_chain (from, BB_END (then_bb), false);
    }
  if (else_last_head)
    delete_insn_chain (first_active_insn (else_bb), else_last_head, false);

  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block.  */
  basic_block test_bb, then_bb, else_bb, join_bb;

  /* The jump that ends TEST_BB.  */
  rtx_insn *jump;

  /* The jump condition.  */
  rtx cond;

  /* New insns should be inserted before this one.  */
  rtx_insn *cond_earliest;

  /* Insns in the THEN and ELSE block.  There is always just this
     one insn in those blocks.  The insns are single_set insns.
     If there was no ELSE block, INSN_B is the last insn before
     COND_EARLIEST, or NULL_RTX.  In the former case, the insn
     operands are still valid, as if INSN_B was moved down below
     the jump.  */
  rtx_insn *insn_a, *insn_b;

  /* The SET_SRC of INSN_A and INSN_B.  */
  rtx a, b;

  /* The SET_DEST of INSN_A.  */
  rtx x;

  /* True if this if block is not canonical.  In the canonical form of
     if blocks, the THEN_BB is the block reached via the fallthru edge
     from TEST_BB.  For the noce transformations, we allow the symmetric
     form as well.  */
  bool then_else_reversed;

  /* Estimated cost of the particular branch instruction.  */
  int branch_cost;
};
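/* For example (illustrative only): given

     if (test) x = a; else x = b;

   TEST_BB ends in JUMP whose condition is COND, THEN_BB contains the
   single insn INSN_A "x = a", ELSE_BB contains INSN_B "x = b", and
   both arms meet again in JOIN_BB.  A and B are the two SET_SRC
   values and X is the common SET_DEST.  */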
static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
static int noce_try_move (struct noce_if_info *);
static int noce_try_store_flag (struct noce_if_info *);
static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
                            rtx, rtx, rtx);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx_insn **);
static int noce_try_minmax (struct noce_if_info *);
static int noce_try_abs (struct noce_if_info *);
static int noce_try_sign_mask (struct noce_if_info *);
/* Helper function for noce_try_store_flag*.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
                      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
                  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    {
      rtx set = pc_set (if_info->jump);
      cond = XEXP (SET_SRC (set), 0);
      if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
          && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump))
        reversep = !reversep;
      if (if_info->then_else_reversed)
        reversep = !reversep;
    }

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx src = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
                                XEXP (cond, 1));
      rtx set = gen_rtx_SET (VOIDmode, x, src);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      if (recog_memoized (insn) >= 0)
        {
          rtx_insn *seq = get_insns ();
          end_sequence ();
          emit_insn (seq);

          if_info->cond_earliest = if_info->jump;

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  return emit_store_flag (x, code, XEXP (cond, 0),
                          XEXP (cond, 1), VOIDmode,
                          (code == LTU || code == LEU
                           || code == GEU || code == GTU), normalize);
}
/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  machine_mode outmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      rtx_insn *seq, *insn;
      rtx target;
      optab ot;

      start_sequence ();
      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
         otherwise construct a suitable SET pattern ourselves.  */
      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
             ? emit_move_insn (x, y)
             : emit_insn (gen_rtx_SET (VOIDmode, x, y));
      seq = get_insns ();
      end_sequence ();

      if (recog_memoized (insn) <= 0)
        {
          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              rtx op = XEXP (x, 0);
              unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
              unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));

              /* store_bit_field expects START to be relative to
                 BYTES_BIG_ENDIAN and adjusts this value for machines with
                 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
                 invoke store_bit_field again it is necessary to have the START
                 value from the first call.  */
              if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
                {
                  if (MEM_P (op))
                    start = BITS_PER_UNIT - start - size;
                  else
                    {
                      gcc_assert (REG_P (op));
                      start = BITS_PER_WORD - start - size;
                    }
                }

              gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
              store_bit_field (op, size, start, 0, 0, GET_MODE (x), y);
              return;
            }

          switch (GET_RTX_CLASS (GET_CODE (y)))
            {
            case RTX_UNARY:
              ot = code_to_optab (GET_CODE (y));
              if (ot)
                {
                  start_sequence ();
                  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                        emit_move_insn (x, target);
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            case RTX_BIN_ARITH:
            case RTX_COMM_ARITH:
              ot = code_to_optab (GET_CODE (y));
              if (ot)
                {
                  start_sequence ();
                  target = expand_binop (GET_MODE (y), ot,
                                         XEXP (y, 0), XEXP (y, 1),
                                         x, 0, OPTAB_DIRECT);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                        emit_move_insn (x, target);
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            default:
              break;
            }
        }

      emit_insn (seq);
      return;
    }

  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos,
                   0, 0, outmode, y);
}
/* Return sequence of instructions generated by if conversion.  This
   function calls end_sequence() to end the current stream, and ensures
   that all instructions are unshared, recognizable non-jump insns.
   On failure, this function returns a NULL_RTX.  */

static rtx_insn *
end_ifcvt_sequence (struct noce_if_info *if_info)
{
  rtx_insn *insn;
  rtx_insn *seq = get_insns ();

  set_used_flags (if_info->x);
  set_used_flags (if_info->cond);
  set_used_flags (if_info->a);
  set_used_flags (if_info->b);
  unshare_all_rtl_in_chain (seq);
  end_sequence ();

  /* Make sure that all of the instructions emitted are recognizable,
     and that we haven't introduced a new jump instruction.
     As an exercise for the reader, build a general mechanism that
     allows proper placement of required clobbers.  */
  for (insn = seq; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
        || recog_memoized (insn) == -1)
      return NULL;

  return seq;
}
/* Convert "if (a != b) x = a; else x = b" into "x = a" and
   "if (a == b) x = a; else x = b" into "x = b".  */

static int
noce_try_move (struct noce_if_info *if_info)
{
  rtx cond = if_info->cond;
  enum rtx_code code = GET_CODE (cond);
  rtx y;
  rtx_insn *seq;

  if (code != NE && code != EQ)
    return FALSE;

  /* This optimization isn't valid if either A or B could be a NaN
     or a signed zero.  */
  if (HONOR_NANS (GET_MODE (if_info->x))
      || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
    return FALSE;

  /* Check whether the operands of the comparison are A and B,
     in either order.  */
  if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
       && rtx_equal_p (if_info->b, XEXP (cond, 1)))
      || (rtx_equal_p (if_info->a, XEXP (cond, 1))
          && rtx_equal_p (if_info->b, XEXP (cond, 0))))
    {
      if (!rtx_interchangeable_p (if_info->a, if_info->b))
        return FALSE;

      y = (code == EQ) ? if_info->a : if_info->b;

      /* Avoid generating the move if the source is the destination.  */
      if (! rtx_equal_p (if_info->x, y))
        {
          start_sequence ();
          noce_emit_move_insn (if_info->x, y);
          seq = end_ifcvt_sequence (if_info);
          if (!seq)
            return FALSE;

          emit_insn_before_setloc (seq, if_info->jump,
                                   INSN_LOCATION (if_info->insn_a));
        }
      return TRUE;
    }
  return FALSE;
}
/* Convert "if (test) x = 1; else x = 0".

   Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
   tried in noce_try_store_flag_constants after noce_try_cmove has had
   a go at the conversion.  */

static int
noce_try_store_flag (struct noce_if_info *if_info)
{
  int reversep;
  rtx target;
  rtx_insn *seq;

  if (CONST_INT_P (if_info->b)
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
      && if_info->a == const0_rtx)
    reversep = 0;
  else if (if_info->b == const0_rtx
           && CONST_INT_P (if_info->a)
           && INTVAL (if_info->a) == STORE_FLAG_VALUE
           && (reversed_comparison_code (if_info->cond, if_info->jump)
               != UNKNOWN))
    reversep = 1;
  else
    return FALSE;

  start_sequence ();

  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
  if (target)
    {
      if (target != if_info->x)
        noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (! seq)
        return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
                               INSN_LOCATION (if_info->insn_a));
      return TRUE;
    }
  else
    {
      end_sequence ();
      return FALSE;
    }
}
/* Convert "if (test) x = a; else x = b", for A and B constant.  */

static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;
  machine_mode mode;

  if (CONST_INT_P (if_info->a)
      && CONST_INT_P (if_info->b))
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);

      diff = (unsigned HOST_WIDE_INT) itrue - ifalse;
      /* Make sure we can represent the difference between the two values.  */
      if ((diff > 0)
          != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
        return FALSE;

      diff = trunc_int_for_mode (diff, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
                     != UNKNOWN);

      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
        normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
               && (STORE_FLAG_VALUE == 1
                   || if_info->branch_cost >= 2))
        normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
               && (STORE_FLAG_VALUE == 1 || if_info->branch_cost >= 2))
        normalize = 1, reversep = 1;
      else if (itrue == -1
               && (STORE_FLAG_VALUE == -1
                   || if_info->branch_cost >= 2))
        normalize = -1;
      else if (ifalse == -1 && can_reverse
               && (STORE_FLAG_VALUE == -1 || if_info->branch_cost >= 2))
        normalize = -1, reversep = 1;
      else if ((if_info->branch_cost >= 2 && STORE_FLAG_VALUE == -1)
               || if_info->branch_cost >= 3)
        normalize = -1;
      else
        return FALSE;

      if (reversep)
        {
          tmp = itrue; itrue = ifalse; ifalse = tmp;
          diff = trunc_int_for_mode (-(unsigned HOST_WIDE_INT) diff, mode);
        }

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
        {
          end_sequence ();
          return FALSE;
        }

      /* if (test) x = 3; else x = 4;
         =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
        {
          target = expand_simple_binop (mode,
                                        (diff == STORE_FLAG_VALUE
                                         ? PLUS : MINUS),
                                        gen_int_mode (ifalse, mode), target,
                                        if_info->x, 0, OPTAB_WIDEN);
        }

      /* if (test) x = 8; else x = 0;
         =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
        {
          target = expand_simple_binop (mode, ASHIFT,
                                        target, GEN_INT (tmp), if_info->x, 0,
                                        OPTAB_WIDEN);
        }

      /* if (test) x = -1; else x = b;
         =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
        {
          target = expand_simple_binop (mode, IOR,
                                        target, gen_int_mode (ifalse, mode),
                                        if_info->x, 0, OPTAB_WIDEN);
        }

      /* if (test) x = a; else x = b;
         =>   x = (-(test != 0) & (b - a)) + a;  */
      else
        {
          target = expand_simple_binop (mode, AND,
                                        target, gen_int_mode (diff, mode),
                                        if_info->x, 0, OPTAB_WIDEN);
          if (target)
            target = expand_simple_binop (mode, PLUS,
                                          target, gen_int_mode (ifalse, mode),
                                          if_info->x, 0, OPTAB_WIDEN);
        }

      if (! target)
        {
          end_sequence ();
          return FALSE;
        }

      if (target != if_info->x)
        noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (!seq)
        return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
                               INSN_LOCATION (if_info->insn_a));
      return TRUE;
    }

  return FALSE;
}
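/* A numeric check of the general case above (illustrative only):
   with ifalse = 5 and itrue = 7, diff is 2.  When the test is true,
   the store-flag result normalized to -1 gives (-1 & 2) + 5 = 7;
   when false, (0 & 2) + 5 = 5.  Both match the original branches.  */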
/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".  */

static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int subtract, normalize;

  if (GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
          != UNKNOWN))
    {
      rtx cond = if_info->cond;
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
          && general_operand (XEXP (cond, 1), VOIDmode))
        {
          start_sequence ();
          target = emit_conditional_add (if_info->x, code,
                                         XEXP (cond, 0),
                                         XEXP (cond, 1),
                                         VOIDmode,
                                         if_info->b,
                                         XEXP (if_info->a, 1),
                                         GET_MODE (if_info->x),
                                         (code == LTU || code == GEU
                                          || code == LEU || code == GTU));
          if (target)
            {
              if (target != if_info->x)
                noce_emit_move_insn (if_info->x, target);

              seq = end_ifcvt_sequence (if_info);
              if (!seq)
                return FALSE;

              emit_insn_before_setloc (seq, if_info->jump,
                                       INSN_LOCATION (if_info->insn_a));
              return TRUE;
            }
          end_sequence ();
        }

      /* If that fails, construct conditional increment or decrement using
         setcc.  */
      if (if_info->branch_cost >= 2
          && (XEXP (if_info->a, 1) == const1_rtx
              || XEXP (if_info->a, 1) == constm1_rtx))
        {
          start_sequence ();
          if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
            subtract = 0, normalize = 0;
          else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
            subtract = 1, normalize = 0;
          else
            subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));

          target = noce_emit_store_flag (if_info,
                                         gen_reg_rtx (GET_MODE (if_info->x)),
                                         1, normalize);

          if (target)
            target = expand_simple_binop (GET_MODE (if_info->x),
                                          subtract ? MINUS : PLUS,
                                          if_info->b, target, if_info->x,
                                          0, OPTAB_WIDEN);
          if (target)
            {
              if (target != if_info->x)
                noce_emit_move_insn (if_info->x, target);

              seq = end_ifcvt_sequence (if_info);
              if (!seq)
                return FALSE;

              emit_insn_before_setloc (seq, if_info->jump,
                                       INSN_LOCATION (if_info->insn_a));
              return TRUE;
            }
          end_sequence ();
        }
    }

  return FALSE;
}
/* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */

static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int reversep;

  reversep = 0;
  if ((if_info->branch_cost >= 2
       || STORE_FLAG_VALUE == -1)
      && ((if_info->a == const0_rtx
           && rtx_equal_p (if_info->b, if_info->x))
          || ((reversep = (reversed_comparison_code (if_info->cond,
                                                     if_info->jump)
                           != UNKNOWN))
              && if_info->b == const0_rtx
              && rtx_equal_p (if_info->a, if_info->x))))
    {
      start_sequence ();
      target = noce_emit_store_flag (if_info,
                                     gen_reg_rtx (GET_MODE (if_info->x)),
                                     reversep, -1);
      if (target)
        target = expand_simple_binop (GET_MODE (if_info->x), AND,
                                      if_info->x,
                                      target, if_info->x, 0,
                                      OPTAB_WIDEN);

      if (target)
        {
          if (target != if_info->x)
            noce_emit_move_insn (if_info->x, target);

          seq = end_ifcvt_sequence (if_info);
          if (!seq)
            return FALSE;

          emit_insn_before_setloc (seq, if_info->jump,
                                   INSN_LOCATION (if_info->insn_a));
          return TRUE;
        }

      end_sequence ();
    }

  return FALSE;
}
/* Helper function for noce_try_cmove and noce_try_cmove_arith.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
                 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  rtx target ATTRIBUTE_UNUSED;
  int unsignedp ATTRIBUTE_UNUSED;

  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx cond = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      rtx if_then_else = gen_rtx_IF_THEN_ELSE (GET_MODE (x),
                                               cond, vtrue, vfalse);
      rtx set = gen_rtx_SET (VOIDmode, x, if_then_else);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      if (recog_memoized (insn) >= 0)
        {
          rtx_insn *seq = get_insns ();
          end_sequence ();
          emit_insn (seq);

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    return NULL_RTX;

#if HAVE_conditional_move
  unsignedp = (code == LTU || code == GEU
               || code == LEU || code == GTU);

  target = emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
                                  vtrue, vfalse, GET_MODE (x),
                                  unsignedp);
  if (target)
    return target;

  /* We might be faced with a situation like:

     x = (reg:M TARGET)
     vtrue = (subreg:M (reg:N VTRUE) BYTE)
     vfalse = (subreg:M (reg:N VFALSE) BYTE)

     We can't do a conditional move in mode M, but it's possible that we
     could do a conditional move in mode N instead and take a subreg of
     the result.

     If we can't create new pseudos, though, don't bother.  */
  if (reload_completed)
    return NULL_RTX;

  if (GET_CODE (vtrue) == SUBREG && GET_CODE (vfalse) == SUBREG)
    {
      rtx reg_vtrue = SUBREG_REG (vtrue);
      rtx reg_vfalse = SUBREG_REG (vfalse);
      unsigned int byte_vtrue = SUBREG_BYTE (vtrue);
      unsigned int byte_vfalse = SUBREG_BYTE (vfalse);
      rtx promoted_target;

      if (GET_MODE (reg_vtrue) != GET_MODE (reg_vfalse)
          || byte_vtrue != byte_vfalse
          || (SUBREG_PROMOTED_VAR_P (vtrue)
              != SUBREG_PROMOTED_VAR_P (vfalse))
          || (SUBREG_PROMOTED_GET (vtrue)
              != SUBREG_PROMOTED_GET (vfalse)))
        return NULL_RTX;

      promoted_target = gen_reg_rtx (GET_MODE (reg_vtrue));

      target = emit_conditional_move (promoted_target, code, cmp_a, cmp_b,
                                      VOIDmode, reg_vtrue, reg_vfalse,
                                      GET_MODE (reg_vtrue), unsignedp);
      /* Nope, couldn't do it in that mode either.  */
      if (!target)
        return NULL_RTX;

      target = gen_rtx_SUBREG (GET_MODE (vtrue), promoted_target, byte_vtrue);
      SUBREG_PROMOTED_VAR_P (target) = SUBREG_PROMOTED_VAR_P (vtrue);
      SUBREG_PROMOTED_SET (target, SUBREG_PROMOTED_GET (vtrue));
      emit_move_insn (x, target);
      return x;
    }
  else
    return NULL_RTX;
#else
  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
  return NULL_RTX;
#endif
}
/* Try only simple constants and registers here.  More complex cases
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
   has had a go at it.  */

static int
noce_try_cmove (struct noce_if_info *if_info)
{
  enum rtx_code code;
  rtx target;
  rtx_insn *seq;

  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
    {
      start_sequence ();

      code = GET_CODE (if_info->cond);
      target = noce_emit_cmove (if_info, if_info->x, code,
                                XEXP (if_info->cond, 0),
                                XEXP (if_info->cond, 1),
                                if_info->a, if_info->b);

      if (target)
        {
          if (target != if_info->x)
            noce_emit_move_insn (if_info->x, target);

          seq = end_ifcvt_sequence (if_info);
          if (!seq)
            return FALSE;

          emit_insn_before_setloc (seq, if_info->jump,
                                   INSN_LOCATION (if_info->insn_a));
          return TRUE;
        }
      else
        {
          end_sequence ();
          return FALSE;
        }
    }

  return FALSE;
}
/* Try more complex cases involving conditional_move.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx_insn *insn_a, *insn_b;
  rtx target;
  int is_mem = 0;
  int insn_cost;
  enum rtx_code code;
  rtx_insn *ifcvt_seq;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  /* ??? FIXME: Magic number 5.  */
  if (cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b)
      && if_info->branch_cost >= 5)
    {
      machine_mode address_mode = get_address_mode (a);

      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (address_mode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not trap or fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_or_fault_p (a) || may_trap_or_fault_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
        if (test)
          x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Total insn_rtx_cost should be smaller than branch cost.  Exit
     if insn_rtx_cost can't be estimated.  */
  if (insn_a)
    {
      insn_cost
        = insn_rtx_cost (PATTERN (insn_a),
                         optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_a)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }
  else
    insn_cost = 0;

  if (insn_b)
    {
      insn_cost
        += insn_rtx_cost (PATTERN (insn_b),
                          optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_b)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
        reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
        reversep = 1;

      if (reversep)
        {
          rtx tmp;
          rtx_insn *tmp_insn;
          code = reversed_comparison_code (if_info->cond, if_info->jump);
          tmp = a, a = b, b = tmp;
          tmp_insn = insn_a, insn_a = insn_b, insn_b = tmp_insn;
        }
    }

  start_sequence ();

  orig_a = a;
  orig_b = b;

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx_insn *insn;

      if (is_mem)
        {
          rtx reg = gen_reg_rtx (GET_MODE (a));
          insn = emit_insn (gen_rtx_SET (VOIDmode, reg, a));
        }
      else if (! insn_a)
        goto end_seq_and_fail;
      else
        {
          a = gen_reg_rtx (GET_MODE (a));
          rtx_insn *copy_of_a = as_a <rtx_insn *> (copy_rtx (insn_a));
          rtx set = single_set (copy_of_a);
          SET_DEST (set) = a;
          insn = emit_insn (PATTERN (copy_of_a));
        }
      if (recog_memoized (insn) < 0)
        goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx pat;
      rtx_insn *last;
      rtx_insn *new_insn;

      if (is_mem)
        {
          rtx reg = gen_reg_rtx (GET_MODE (b));
          pat = gen_rtx_SET (VOIDmode, reg, b);
        }
      else if (! insn_b)
        goto end_seq_and_fail;
      else
        {
          b = gen_reg_rtx (GET_MODE (b));
          rtx_insn *copy_of_insn_b = as_a <rtx_insn *> (copy_rtx (insn_b));
          rtx set = single_set (copy_of_insn_b);
          SET_DEST (set) = b;
          pat = PATTERN (copy_of_insn_b);
        }

      /* If insn to set up A clobbers any registers B depends on, try to
         swap insn that sets up A with the one that sets up B.  If even
         that doesn't help, punt.  */
      last = get_last_insn ();
      if (last && modified_in_p (orig_b, last))
        {
          new_insn = emit_insn_before (pat, get_insns ());
          if (modified_in_p (orig_a, new_insn))
            goto end_seq_and_fail;
        }
      else
        new_insn = emit_insn (pat);

      if (recog_memoized (new_insn) < 0)
        goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
                            XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling the memory case from above, emit the load now.  */
  if (is_mem)
    {
      rtx mem = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
        MEM_VOLATILE_P (mem) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
        set_mem_alias_set (mem, MEM_ALIAS_SET (if_info->a));
      set_mem_align (mem,
                     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
      set_mem_addr_space (mem, MEM_ADDR_SPACE (if_info->a));

      noce_emit_move_insn (if_info->x, mem);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  ifcvt_seq = end_ifcvt_sequence (if_info);
  if (!ifcvt_seq)
    return FALSE;

  emit_insn_before_setloc (ifcvt_seq, if_info->jump,
                           INSN_LOCATION (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}
/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
                        rtx_insn **earliest)
{
  rtx cond, set;
  rtx_insn *insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump);
  if (if_info->then_else_reversed)
    reverse = !reverse;

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
        replacing "if x < 4" with "if x <= 3".  We can undo that (or
        make equivalent types of changes) to get the constants we need
        if they're off by one in the right direction.  */

  if (CONST_INT_P (target))
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
      if (prev_insn
          && BLOCK_FOR_INSN (prev_insn)
             == BLOCK_FOR_INSN (if_info->cond_earliest)
          && INSN_P (prev_insn)
          && GET_CODE (PATTERN (prev_insn)) == SET)
        {
          rtx src = find_reg_equal_equiv_note (prev_insn);
          if (!src)
            src = SET_SRC (PATTERN (prev_insn));
          if (CONST_INT_P (src))
            {
              if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
                op_a = src;
              else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
                op_b = src;

              if (CONST_INT_P (op_a))
                {
                  rtx tmp = op_a;
                  op_a = op_b;
                  op_b = tmp;
                  code = swap_condition (code);
                }
            }
        }

      /* Now, look to see if we can get the right constant by
         adjusting the conditional.  */
      if (CONST_INT_P (op_b))
        {
          HOST_WIDE_INT desired_val = INTVAL (target);
          HOST_WIDE_INT actual_val = INTVAL (op_b);

          switch (code)
            {
            case LT:
              if (actual_val == desired_val + 1)
                {
                  code = LE;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case LE:
              if (actual_val == desired_val - 1)
                {
                  code = LT;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case GT:
              if (actual_val == desired_val - 1)
                {
                  code = GE;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case GE:
              if (actual_val == desired_val + 1)
                {
                  code = GT;
                  op_b = GEN_INT (desired_val);
                }
              break;
            default:
              break;
            }
        }

      /* If we made any changes, generate a new conditional that is
         equivalent to what we started with, but has the right
         constants in it.  */
      if (code != GET_CODE (if_info->cond)
          || op_a != XEXP (if_info->cond, 0)
          || op_b != XEXP (if_info->cond, 1))
        {
          cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
          *earliest = if_info->cond_earliest;
          return cond;
        }
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
                                 earliest, target, false, true);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (modified_in_p (if_info->a, insn)
            || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
/* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.  */

static int
noce_try_minmax (struct noce_if_info *if_info)
{
  rtx cond, target;
  rtx_insn *earliest, *seq;
  enum rtx_code code, op;
  int unsignedp;

  /* ??? Reject modes with NaNs or signed zeros since we don't know how
     they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
     to get the target to tell us...  */
  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
      || HONOR_NANS (GET_MODE (if_info->x)))
    return FALSE;

  cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect, and canonicalize
     the comparison code.  */
  code = GET_CODE (cond);
  if (rtx_equal_p (XEXP (cond, 0), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
        return FALSE;
    }
  else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
        return FALSE;
      code = swap_condition (code);
    }
  else
    return FALSE;

  /* Determine what sort of operation this is.  Note that the code is for
     a taken branch, so the code->operation mapping appears backwards.  */
  switch (code)
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      op = SMAX;
      unsignedp = 0;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      op = SMIN;
      unsignedp = 0;
      break;
    case LTU:
    case LEU:
      op = UMAX;
      unsignedp = 1;
      break;
    case GTU:
    case GEU:
      op = UMIN;
      unsignedp = 1;
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_simple_binop (GET_MODE (if_info->x), op,
                                if_info->a, if_info->b,
                                if_info->x, unsignedp, OPTAB_WIDEN);
  if (! target)
    {
      end_sequence ();
      return FALSE;
    }
  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
2016 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
2017 "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
2018 etc. */
2020 static int
2021 noce_try_abs (struct noce_if_info *if_info)
2023 rtx cond, target, a, b, c;
2024 rtx_insn *earliest, *seq;
2025 int negate;
2026 bool one_cmpl = false;
2028 /* Reject modes with signed zeros. */
2029 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
2030 return FALSE;
2032 /* Recognize A and B as constituting an ABS or NABS. The canonical
2033 form is a branch around the negation, taken when the object is the
2034 first operand of a comparison against 0 that evaluates to true. */
2035 a = if_info->a;
2036 b = if_info->b;
2037 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
2038 negate = 0;
2039 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
2041 c = a; a = b; b = c;
2042 negate = 1;
2044 else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
2046 negate = 0;
2047 one_cmpl = true;
2049 else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
2051 c = a; a = b; b = c;
2052 negate = 1;
2053 one_cmpl = true;
2055 else
2056 return FALSE;
2058 cond = noce_get_alt_condition (if_info, b, &earliest);
2059 if (!cond)
2060 return FALSE;
2062 /* Verify the condition is of the form we expect. */
2063 if (rtx_equal_p (XEXP (cond, 0), b))
2064 c = XEXP (cond, 1);
2065 else if (rtx_equal_p (XEXP (cond, 1), b))
2067 c = XEXP (cond, 0);
2068 negate = !negate;
2070 else
2071 return FALSE;
2073 /* Verify that C is zero. Search one step backward for a
2074 REG_EQUAL note or a simple source if necessary. */
2075 if (REG_P (c))
2077 rtx set;
2078 rtx_insn *insn = prev_nonnote_insn (earliest);
2079 if (insn
2080 && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
2081 && (set = single_set (insn))
2082 && rtx_equal_p (SET_DEST (set), c))
2084 rtx note = find_reg_equal_equiv_note (insn);
2085 if (note)
2086 c = XEXP (note, 0);
2087 else
2088 c = SET_SRC (set);
2090 else
2091 return FALSE;
2093 if (MEM_P (c)
2094 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
2095 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
2096 c = get_pool_constant (XEXP (c, 0));
2098 /* Work around funny ideas get_condition has wrt canonicalization.
2099 Note that these rtx constants are known to be CONST_INT, and
2100 therefore imply integer comparisons. */
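/* I.e. also accept B > -1 and B < 1 as the integer equivalents of
   B >= 0 and B <= 0.  */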
2101 if (c == constm1_rtx && GET_CODE (cond) == GT)
2103 else if (c == const1_rtx && GET_CODE (cond) == LT)
2105 else if (c != CONST0_RTX (GET_MODE (b)))
2106 return FALSE;
2108 /* Determine what sort of operation this is. */
2109 switch (GET_CODE (cond))
2111 case LT:
2112 case LE:
2113 case UNLT:
2114 case UNLE:
2115 negate = !negate;
2116 break;
2117 case GT:
2118 case GE:
2119 case UNGT:
2120 case UNGE:
2121 break;
2122 default:
2123 return FALSE;
2126 start_sequence ();
2127 if (one_cmpl)
2128 target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
2129 if_info->x);
2130 else
2131 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
2133 /* ??? It's a quandary whether cmove would be better here, especially
2134 for integers. Perhaps combine will clean things up. */
2135 if (target && negate)
2137 if (one_cmpl)
2138 target = expand_simple_unop (GET_MODE (target), NOT, target,
2139 if_info->x, 0);
2140 else
2141 target = expand_simple_unop (GET_MODE (target), NEG, target,
2142 if_info->x, 0);
2145 if (! target)
2147 end_sequence ();
2148 return FALSE;
2151 if (target != if_info->x)
2152 noce_emit_move_insn (if_info->x, target);
2154 seq = end_ifcvt_sequence (if_info);
2155 if (!seq)
2156 return FALSE;
2158 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2159 if_info->cond = cond;
2160 if_info->cond_earliest = earliest;
2162 return TRUE;
2165 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;". */
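/* E.g. for a 32-bit mode C is 31: the arithmetic shift m >> 31 is all
   ones exactly when m < 0 and zero otherwise, so ANDing the mask with b
   yields b when m < 0 and 0 otherwise.  */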
2167 static int
2168 noce_try_sign_mask (struct noce_if_info *if_info)
2170 rtx cond, t, m, c;
2171 rtx_insn *seq;
2172 machine_mode mode;
2173 enum rtx_code code;
2174 bool t_unconditional;
2176 cond = if_info->cond;
2177 code = GET_CODE (cond);
2178 m = XEXP (cond, 0);
2179 c = XEXP (cond, 1);
2181 t = NULL_RTX;
2182 if (if_info->a == const0_rtx)
2184 if ((code == LT && c == const0_rtx)
2185 || (code == LE && c == constm1_rtx))
2186 t = if_info->b;
2188 else if (if_info->b == const0_rtx)
2190 if ((code == GE && c == const0_rtx)
2191 || (code == GT && c == constm1_rtx))
2192 t = if_info->a;
2195 if (! t || side_effects_p (t))
2196 return FALSE;
2198 /* We currently don't handle different modes. */
2199 mode = GET_MODE (t);
2200 if (GET_MODE (m) != mode)
2201 return FALSE;
2203 /* This is only profitable if T is unconditionally executed/evaluated in the
2204 original insn sequence or T is cheap. The former happens if B is the
2205 non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
2206 INSN_B which can happen for e.g. conditional stores to memory. For the
2207 cost computation use the block TEST_BB where the evaluation will end up
2208 after the transformation. */
2209 t_unconditional =
2210 (t == if_info->b
2211 && (if_info->insn_b == NULL_RTX
2212 || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
2213 if (!(t_unconditional
2214 || (set_src_cost (t, optimize_bb_for_speed_p (if_info->test_bb))
2215 < COSTS_N_INSNS (2))))
2216 return FALSE;
2218 start_sequence ();
2219 /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
2220 "(signed) m >> 31" directly. This benefits targets with specialized
2221 insns to obtain the signmask, but still uses ashr_optab otherwise. */
2222 m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
2223 t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
2224 : NULL_RTX;
2226 if (!t)
2228 end_sequence ();
2229 return FALSE;
2232 noce_emit_move_insn (if_info->x, t);
2234 seq = end_ifcvt_sequence (if_info);
2235 if (!seq)
2236 return FALSE;
2238 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2239 return TRUE;
2243 /* Optimize away "if (x & C) x |= C" and similar bit manipulation
2244 transformations. */
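/* A summary of the rewrites recognized below, with C a one-bit
   constant 1 << BITNUM:

     if ((x & C) == 0) x |= C;   =>  x |= C;
     if ((x & C) != 0) x |= C;   =>  nothing
     if ((x & C) == 0) x ^= C;   =>  x |= C;
     if ((x & C) != 0) x ^= C;   =>  x &= ~C;
     if ((x & C) == 0) x &= ~C;  =>  nothing
     if ((x & C) != 0) x &= ~C;  =>  x &= ~C;  */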
2246 static int
2247 noce_try_bitop (struct noce_if_info *if_info)
2249 rtx cond, x, a, result;
2250 rtx_insn *seq;
2251 machine_mode mode;
2252 enum rtx_code code;
2253 int bitnum;
2255 x = if_info->x;
2256 cond = if_info->cond;
2257 code = GET_CODE (cond);
2259 /* Check that there is no ELSE arm, i.e. B is X. */
2260 if (! rtx_equal_p (x, if_info->b))
2261 return FALSE;
2263 /* Check for a suitable condition. */
2264 if (code != NE && code != EQ)
2265 return FALSE;
2266 if (XEXP (cond, 1) != const0_rtx)
2267 return FALSE;
2268 cond = XEXP (cond, 0);
2270 /* ??? We could also handle AND here. */
2271 if (GET_CODE (cond) == ZERO_EXTRACT)
2273 if (XEXP (cond, 1) != const1_rtx
2274 || !CONST_INT_P (XEXP (cond, 2))
2275 || ! rtx_equal_p (x, XEXP (cond, 0)))
2276 return FALSE;
2277 bitnum = INTVAL (XEXP (cond, 2));
2278 mode = GET_MODE (x);
2279 if (BITS_BIG_ENDIAN)
2280 bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
2281 if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
2282 return FALSE;
2284 else
2285 return FALSE;
2287 a = if_info->a;
2288 if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
2290 /* Check for "if (X & C) x = x op C". */
2291 if (! rtx_equal_p (x, XEXP (a, 0))
2292 || !CONST_INT_P (XEXP (a, 1))
2293 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2294 != (unsigned HOST_WIDE_INT) 1 << bitnum)
2295 return FALSE;
2297 /* if ((x & C) == 0) x |= C; is transformed to x |= C. */
2298 /* if ((x & C) != 0) x |= C; is transformed to nothing. */
2299 if (GET_CODE (a) == IOR)
2300 result = (code == NE) ? a : NULL_RTX;
2301 else if (code == NE)
2303 /* if ((x & C) == 0) x ^= C; is transformed to x |= C. */
2304 result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
2305 result = simplify_gen_binary (IOR, mode, x, result);
2307 else
2309 /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C. */
2310 result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
2311 result = simplify_gen_binary (AND, mode, x, result);
2314 else if (GET_CODE (a) == AND)
2316 /* Check for "if (X & C) x &= ~C". */
2317 if (! rtx_equal_p (x, XEXP (a, 0))
2318 || !CONST_INT_P (XEXP (a, 1))
2319 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2320 != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
2321 return FALSE;
2323 /* if ((x & C) == 0) x &= ~C; is transformed to nothing. */
2324 /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C. */
2325 result = (code == EQ) ? a : NULL_RTX;
2327 else
2328 return FALSE;
2330 if (result)
2332 start_sequence ();
2333 noce_emit_move_insn (x, result);
2334 seq = end_ifcvt_sequence (if_info);
2335 if (!seq)
2336 return FALSE;
2338 emit_insn_before_setloc (seq, if_info->jump,
2339 INSN_LOCATION (if_info->insn_a));
2341 return TRUE;
2345 /* Similar to get_condition, only the resulting condition must be
2346 valid at JUMP, instead of at EARLIEST.
2348 If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
2349 THEN block of the caller, and we have to reverse the condition. */
2351 static rtx
2352 noce_get_condition (rtx_insn *jump, rtx_insn **earliest, bool then_else_reversed)
2354 rtx cond, set, tmp;
2355 bool reverse;
2357 if (! any_condjump_p (jump))
2358 return NULL_RTX;
2360 set = pc_set (jump);
2362 /* If this branches to JUMP_LABEL when the condition is false,
2363 reverse the condition. */
2364 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
2365 && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump));
2367 /* We may have to reverse because the caller's if block is not canonical,
2368 i.e. the THEN block isn't the fallthrough block for the TEST block
2369 (see find_if_header). */
2370 if (then_else_reversed)
2371 reverse = !reverse;
2373 /* If the condition variable is a register and is MODE_INT, accept it. */
2375 cond = XEXP (SET_SRC (set), 0);
2376 tmp = XEXP (cond, 0);
2377 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT
2378 && (GET_MODE (tmp) != BImode
2379 || !targetm.small_register_classes_for_mode_p (BImode)))
2381 *earliest = jump;
2383 if (reverse)
2384 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2385 GET_MODE (cond), tmp, XEXP (cond, 1));
2386 return cond;
2389 /* Otherwise, fall back on canonicalize_condition to do the dirty
2390 work of manipulating MODE_CC values and COMPARE rtx codes. */
2391 tmp = canonicalize_condition (jump, cond, reverse, earliest,
2392 NULL_RTX, false, true);
2394 /* We don't handle side-effects in the condition; doing so would require
2395 handling REG_INC notes and making sure no duplicate conditions are emitted. */
2396 if (tmp != NULL_RTX && side_effects_p (tmp))
2397 return NULL_RTX;
2399 return tmp;
2402 /* Return true if OP is ok for if-then-else processing. */
2404 static int
2405 noce_operand_ok (const_rtx op)
2407 if (side_effects_p (op))
2408 return FALSE;
2410 /* We special-case memories, so handle any of them with
2411 no address side effects. */
2412 if (MEM_P (op))
2413 return ! side_effects_p (XEXP (op, 0));
2415 return ! may_trap_p (op);
2418 /* Return true if a write into MEM may trap or fault. */
2420 static bool
2421 noce_mem_write_may_trap_or_fault_p (const_rtx mem)
2423 rtx addr;
2425 if (MEM_READONLY_P (mem))
2426 return true;
2428 if (may_trap_or_fault_p (mem))
2429 return true;
2431 addr = XEXP (mem, 0);
2433 /* Call the target hook to avoid the effects of -fpic etc. */
2434 addr = targetm.delegitimize_address (addr);
2436 while (addr)
2437 switch (GET_CODE (addr))
2439 case CONST:
2440 case PRE_DEC:
2441 case PRE_INC:
2442 case POST_DEC:
2443 case POST_INC:
2444 case POST_MODIFY:
2445 addr = XEXP (addr, 0);
2446 break;
2447 case LO_SUM:
2448 case PRE_MODIFY:
2449 addr = XEXP (addr, 1);
2450 break;
2451 case PLUS:
2452 if (CONST_INT_P (XEXP (addr, 1)))
2453 addr = XEXP (addr, 0);
2454 else
2455 return false;
2456 break;
2457 case LABEL_REF:
2458 return true;
2459 case SYMBOL_REF:
2460 if (SYMBOL_REF_DECL (addr)
2461 && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
2462 return true;
2463 return false;
2464 default:
2465 return false;
2468 return false;
2471 /* Return whether we can use store speculation for MEM. TOP_BB is the
2472 basic block above the conditional block where we are considering
2473 doing the speculative store. We look for whether MEM is set
2474 unconditionally later in the function. */
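/* E.g. given

     if (cond)
       *p = x;
     ...
     *p = y;

   where the second store post-dominates the condition and no potential
   memory barrier (a volatile access or a non-const call) intervenes,
   the conditional store can be performed unconditionally, since *p is
   written on every path anyway.  */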
2476 static bool
2477 noce_can_store_speculate_p (basic_block top_bb, const_rtx mem)
2479 basic_block dominator;
2481 for (dominator = get_immediate_dominator (CDI_POST_DOMINATORS, top_bb);
2482 dominator != NULL;
2483 dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator))
2485 rtx_insn *insn;
2487 FOR_BB_INSNS (dominator, insn)
2489 /* If we see something that might be a memory barrier, we
2490 have to stop looking. Even if the MEM is set later in
2491 the function, we still don't want to set it
2492 unconditionally before the barrier. */
2493 if (INSN_P (insn)
2494 && (volatile_insn_p (PATTERN (insn))
2495 || (CALL_P (insn) && (!RTL_CONST_CALL_P (insn)))))
2496 return false;
2498 if (memory_must_be_modified_in_insn_p (mem, insn))
2499 return true;
2500 if (modified_in_p (XEXP (mem, 0), insn))
2501 return false;
2506 return false;
2509 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2510 it without using conditional execution. Return TRUE if we were successful
2511 at converting the block. */
2513 static int
2514 noce_process_if_block (struct noce_if_info *if_info)
2516 basic_block test_bb = if_info->test_bb; /* test block */
2517 basic_block then_bb = if_info->then_bb; /* THEN */
2518 basic_block else_bb = if_info->else_bb; /* ELSE or NULL */
2519 basic_block join_bb = if_info->join_bb; /* JOIN */
2520 rtx_insn *jump = if_info->jump;
2521 rtx cond = if_info->cond;
2522 rtx_insn *insn_a, *insn_b;
2523 rtx set_a, set_b;
2524 rtx orig_x, x, a, b;
2526 /* We're looking for patterns of the form
2528 (1) if (...) x = a; else x = b;
2529 (2) x = b; if (...) x = a;
2530 (3) if (...) x = a; // as if with an initial x = x.
2532 The later patterns require jumps to be more expensive.
2534 ??? For future expansion, look for multiple X in such patterns. */
2536 /* Look for one of the potential sets. */
2537 insn_a = first_active_insn (then_bb);
2538 if (! insn_a
2539 || insn_a != last_active_insn (then_bb, FALSE)
2540 || (set_a = single_set (insn_a)) == NULL_RTX)
2541 return FALSE;
2543 x = SET_DEST (set_a);
2544 a = SET_SRC (set_a);
2546 /* Look for the other potential set. Make sure we've got equivalent
2547 destinations. */
2548 /* ??? This is overconservative. Storing to two different mems is
2549 as easy as conditionally computing the address. Storing to a
2550 single mem merely requires a scratch memory to use as one of the
2551 destination addresses; often the memory immediately below the
2552 stack pointer is available for this. */
2553 set_b = NULL_RTX;
2554 if (else_bb)
2556 insn_b = first_active_insn (else_bb);
2557 if (! insn_b
2558 || insn_b != last_active_insn (else_bb, FALSE)
2559 || (set_b = single_set (insn_b)) == NULL_RTX
2560 || ! rtx_interchangeable_p (x, SET_DEST (set_b)))
2561 return FALSE;
2563 else
2565 insn_b = prev_nonnote_nondebug_insn (if_info->cond_earliest);
2566 /* We're going to be moving the evaluation of B down from above
2567 COND_EARLIEST to JUMP. Make sure the relevant data is still
2568 intact. */
2569 if (! insn_b
2570 || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
2571 || !NONJUMP_INSN_P (insn_b)
2572 || (set_b = single_set (insn_b)) == NULL_RTX
2573 || ! rtx_interchangeable_p (x, SET_DEST (set_b))
2574 || ! noce_operand_ok (SET_SRC (set_b))
2575 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
2576 || modified_between_p (SET_SRC (set_b), insn_b, jump)
2577 /* Avoid extending the lifetime of hard registers on small
2578 register class machines. */
2579 || (REG_P (SET_SRC (set_b))
2580 && HARD_REGISTER_P (SET_SRC (set_b))
2581 && targetm.small_register_classes_for_mode_p
2582 (GET_MODE (SET_SRC (set_b))))
2583 /* Likewise with X. In particular this can happen when
2584 noce_get_condition looks farther back in the instruction
2585 stream than one might expect. */
2586 || reg_overlap_mentioned_p (x, cond)
2587 || reg_overlap_mentioned_p (x, a)
2588 || modified_between_p (x, insn_b, jump))
2590 insn_b = NULL;
2591 set_b = NULL_RTX;
2595 /* If x has side effects then only the if-then-else form is safe to
2596 convert. But even in that case we would need to restore any notes
2597 (such as REG_INC) at the end. That can be tricky if
2598 noce_emit_move_insn expands to more than one insn, so disable the
2599 optimization entirely for now if there are side effects. */
2600 if (side_effects_p (x))
2601 return FALSE;
2603 b = (set_b ? SET_SRC (set_b) : x);
2605 /* Only operate on register destinations, and even then avoid extending
2606 the lifetime of hard registers on small register class machines. */
2607 orig_x = x;
2608 if (!REG_P (x)
2609 || (HARD_REGISTER_P (x)
2610 && targetm.small_register_classes_for_mode_p (GET_MODE (x))))
2612 if (GET_MODE (x) == BLKmode)
2613 return FALSE;
2615 if (GET_CODE (x) == ZERO_EXTRACT
2616 && (!CONST_INT_P (XEXP (x, 1))
2617 || !CONST_INT_P (XEXP (x, 2))))
2618 return FALSE;
2620 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
2621 ? XEXP (x, 0) : x));
2624 /* Don't operate on sources that may trap or are volatile. */
2625 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
2626 return FALSE;
2628 retry:
2629 /* Set up the info block for our subroutines. */
2630 if_info->insn_a = insn_a;
2631 if_info->insn_b = insn_b;
2632 if_info->x = x;
2633 if_info->a = a;
2634 if_info->b = b;
2636 /* Try optimizations in some approximation of a useful order. */
2637 /* ??? Should first look to see if X is live incoming at all. If it
2638 isn't, we don't need anything but an unconditional set. */
2640 /* Look and see if A and B are really the same. Avoid creating silly
2641 cmove constructs that no one will fix up later. */
2642 if (rtx_interchangeable_p (a, b))
2644 /* If we have an INSN_B, we don't have to create any new rtl. Just
2645 move the instruction that we already have. If we don't have an
2646 INSN_B, that means that A == X, and we've got a noop move. In
2647 that case don't do anything and let the code below delete INSN_A. */
2648 if (insn_b && else_bb)
2650 rtx note;
2652 if (else_bb && insn_b == BB_END (else_bb))
2653 BB_END (else_bb) = PREV_INSN (insn_b);
2654 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
2656 /* If there was a REG_EQUAL note, delete it since it may have been
2657 true due to this insn being after a jump. */
2658 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
2659 remove_note (insn_b, note);
2661 insn_b = NULL;
2663 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
2664 x must be executed twice. */
2665 else if (insn_b && side_effects_p (orig_x))
2666 return FALSE;
2668 x = orig_x;
2669 goto success;
2672 if (!set_b && MEM_P (orig_x))
2674 /* Disallow the "if (...) x = a;" form (implicit "else x = x;")
2675 for optimizations if writing to x may trap or fault,
2676 i.e. it's a memory other than a static var or a stack slot,
2677 is misaligned on strict-alignment machines or is read-only. If
2678 x is a read-only memory, then the program is valid only if we
2679 avoid the store into it. If there are stores on both the
2680 THEN and ELSE arms, then we can go ahead with the conversion;
2681 either the program is broken, or the condition is always
2682 false such that the other memory is selected. */
2683 if (noce_mem_write_may_trap_or_fault_p (orig_x))
2684 return FALSE;
2686 /* Avoid store speculation: given "if (...) x = a" where x is a
2687 MEM, we only want to do the store if x is always set
2688 somewhere in the function. This avoids cases like
2689 if (pthread_mutex_trylock(mutex))
2690 ++global_variable;
2691 where we only want global_variable to be changed if the mutex
2692 is held. FIXME: This should ideally be expressed directly in
2693 RTL somehow. */
2694 if (!noce_can_store_speculate_p (test_bb, orig_x))
2695 return FALSE;
2698 if (noce_try_move (if_info))
2699 goto success;
2700 if (noce_try_store_flag (if_info))
2701 goto success;
2702 if (noce_try_bitop (if_info))
2703 goto success;
2704 if (noce_try_minmax (if_info))
2705 goto success;
2706 if (noce_try_abs (if_info))
2707 goto success;
2708 if (HAVE_conditional_move
2709 && noce_try_cmove (if_info))
2710 goto success;
2711 if (! targetm.have_conditional_execution ())
2713 if (noce_try_store_flag_constants (if_info))
2714 goto success;
2715 if (noce_try_addcc (if_info))
2716 goto success;
2717 if (noce_try_store_flag_mask (if_info))
2718 goto success;
2719 if (HAVE_conditional_move
2720 && noce_try_cmove_arith (if_info))
2721 goto success;
2722 if (noce_try_sign_mask (if_info))
2723 goto success;
2726 if (!else_bb && set_b)
2728 insn_b = NULL;
2729 set_b = NULL_RTX;
2730 b = orig_x;
2731 goto retry;
2734 return FALSE;
2736 success:
2738 /* If we used a temporary, fix it up now. */
2739 if (orig_x != x)
2741 rtx_insn *seq;
2743 start_sequence ();
2744 noce_emit_move_insn (orig_x, x);
2745 seq = get_insns ();
2746 set_used_flags (orig_x);
2747 unshare_all_rtl_in_chain (seq);
2748 end_sequence ();
2750 emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATION (insn_a));
2753 /* The original THEN and ELSE blocks may now be removed. The test block
2754 must now jump to the join block. If the test block and the join block
2755 can be merged, do so. */
2756 if (else_bb)
2758 delete_basic_block (else_bb);
2759 num_true_changes++;
2761 else
2762 remove_edge (find_edge (test_bb, join_bb));
2764 remove_edge (find_edge (then_bb, join_bb));
2765 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
2766 delete_basic_block (then_bb);
2767 num_true_changes++;
2769 if (can_merge_blocks_p (test_bb, join_bb))
2771 merge_blocks (test_bb, join_bb);
2772 num_true_changes++;
2775 num_updated_if_blocks++;
2776 return TRUE;
2779 /* Check whether a block is suitable for conditional move conversion.
2780 Every insn must be a simple set of a register to a constant or a
2781 register. For each assignment, store the value in the pointer map
2782 VALS, keyed by register pointer, then store the register
2783 pointer in REGS. COND is the condition we will test. */
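/* E.g. a block containing only "r1 = 5; r2 = r3;" qualifies, whereas a
   block containing a memory load or store does not: each source must be
   a constant or a register, and neither side of a set may trap or have
   side effects.  */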
2785 static int
2786 check_cond_move_block (basic_block bb,
2787 hash_map<rtx, rtx> *vals,
2788 vec<rtx> *regs,
2789 rtx cond)
2791 rtx_insn *insn;
2793 /* We can only handle simple jumps at the end of the basic block.
2794 It is almost impossible to update the CFG otherwise. */
2795 insn = BB_END (bb);
2796 if (JUMP_P (insn) && !onlyjump_p (insn))
2797 return FALSE;
2799 FOR_BB_INSNS (bb, insn)
2801 rtx set, dest, src;
2803 if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
2804 continue;
2805 set = single_set (insn);
2806 if (!set)
2807 return FALSE;
2809 dest = SET_DEST (set);
2810 src = SET_SRC (set);
2811 if (!REG_P (dest)
2812 || (HARD_REGISTER_P (dest)
2813 && targetm.small_register_classes_for_mode_p (GET_MODE (dest))))
2814 return FALSE;
2816 if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
2817 return FALSE;
2819 if (side_effects_p (src) || side_effects_p (dest))
2820 return FALSE;
2822 if (may_trap_p (src) || may_trap_p (dest))
2823 return FALSE;
2825 /* Don't try to handle this if the source register was
2826 modified earlier in the block. */
2827 if ((REG_P (src)
2828 && vals->get (src))
2829 || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
2830 && vals->get (SUBREG_REG (src))))
2831 return FALSE;
2833 /* Don't try to handle this if the destination register was
2834 modified earlier in the block. */
2835 if (vals->get (dest))
2836 return FALSE;
2838 /* Don't try to handle this if the condition uses the
2839 destination register. */
2840 if (reg_overlap_mentioned_p (dest, cond))
2841 return FALSE;
2843 /* Don't try to handle this if the source register is modified
2844 later in the block. */
2845 if (!CONSTANT_P (src)
2846 && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
2847 return FALSE;
2849 vals->put (dest, src);
2851 regs->safe_push (dest);
2854 return TRUE;
2857 /* Given a basic block BB suitable for conditional move conversion,
2858 a condition COND, and pointer maps THEN_VALS and ELSE_VALS containing
2859 the register values depending on COND, emit the insns in the block as
2860 conditional moves. If ELSE_BLOCK_P is true, THEN_BB was already
2861 processed. The caller has started a sequence for the conversion.
2862 Return true if successful, false if something goes wrong. */
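/* Each insn "dest = src" becomes, in effect, a conditional move between
   the THEN and ELSE values recorded for DEST.  Note that COND holds
   when the branch is taken, i.e. on the ELSE path, so the THEN value
   occupies the false arm of the emitted conditional move.  When DEST is
   set on only one side, the missing arm is DEST itself, preserving its
   old value.  */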
2864 static bool
2865 cond_move_convert_if_block (struct noce_if_info *if_infop,
2866 basic_block bb, rtx cond,
2867 hash_map<rtx, rtx> *then_vals,
2868 hash_map<rtx, rtx> *else_vals,
2869 bool else_block_p)
2871 enum rtx_code code;
2872 rtx_insn *insn;
2873 rtx cond_arg0, cond_arg1;
2875 code = GET_CODE (cond);
2876 cond_arg0 = XEXP (cond, 0);
2877 cond_arg1 = XEXP (cond, 1);
2879 FOR_BB_INSNS (bb, insn)
2881 rtx set, target, dest, t, e;
2883 /* ??? Maybe emit conditional debug insn? */
2884 if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
2885 continue;
2886 set = single_set (insn);
2887 gcc_assert (set && REG_P (SET_DEST (set)));
2889 dest = SET_DEST (set);
2891 rtx *then_slot = then_vals->get (dest);
2892 rtx *else_slot = else_vals->get (dest);
2893 t = then_slot ? *then_slot : NULL_RTX;
2894 e = else_slot ? *else_slot : NULL_RTX;
2896 if (else_block_p)
2898 /* If this register was set in the then block, we already
2899 handled this case there. */
2900 if (t)
2901 continue;
2902 t = dest;
2903 gcc_assert (e);
2905 else
2907 gcc_assert (t);
2908 if (!e)
2909 e = dest;
2912 target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
2913 t, e);
2914 if (!target)
2915 return false;
2917 if (target != dest)
2918 noce_emit_move_insn (dest, target);
2921 return true;
2924 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2925 it using only conditional moves. Return TRUE if we were successful at
2926 converting the block. */
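/* E.g. (source-level sketch)

     if (test) { x = a; y = c; } else { x = b; }

   becomes, in effect,

     x = test ? a : b;
     y = test ? c : y;

   with both conditional moves emitted in the test block.  */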
2928 static int
2929 cond_move_process_if_block (struct noce_if_info *if_info)
2931 basic_block test_bb = if_info->test_bb;
2932 basic_block then_bb = if_info->then_bb;
2933 basic_block else_bb = if_info->else_bb;
2934 basic_block join_bb = if_info->join_bb;
2935 rtx_insn *jump = if_info->jump;
2936 rtx cond = if_info->cond;
2937 rtx_insn *seq, *loc_insn;
2938 rtx reg;
2939 int c;
2940 vec<rtx> then_regs = vNULL;
2941 vec<rtx> else_regs = vNULL;
2942 unsigned int i;
2943 int success_p = FALSE;
2945 /* Build a mapping for each block to the value used for each
2946 register. */
2947 hash_map<rtx, rtx> then_vals;
2948 hash_map<rtx, rtx> else_vals;
2950 /* Make sure the blocks are suitable. */
2951 if (!check_cond_move_block (then_bb, &then_vals, &then_regs, cond)
2952 || (else_bb
2953 && !check_cond_move_block (else_bb, &else_vals, &else_regs, cond)))
2954 goto done;
2956 /* Make sure the blocks can be used together. If the same register
2957 is set in both blocks, and is not set to a constant in both
2958 cases, then both blocks must set it to the same register. We
2959 have already verified that if it is set to a register, the
2960 source register does not change after the assignment. Also count
2961 the number of registers set in only one of the blocks. */
2962 c = 0;
2963 FOR_EACH_VEC_ELT (then_regs, i, reg)
2965 rtx *then_slot = then_vals.get (reg);
2966 rtx *else_slot = else_vals.get (reg);
2968 gcc_checking_assert (then_slot);
2969 if (!else_slot)
2970 ++c;
2971 else
2973 rtx then_val = *then_slot;
2974 rtx else_val = *else_slot;
2975 if (!CONSTANT_P (then_val) && !CONSTANT_P (else_val)
2976 && !rtx_equal_p (then_val, else_val))
2977 goto done;
2981 /* Finish off c for MAX_CONDITIONAL_EXECUTE. */
2982 FOR_EACH_VEC_ELT (else_regs, i, reg)
2984 gcc_checking_assert (else_vals.get (reg));
2985 if (!then_vals.get (reg))
2986 ++c;
2989 /* Make sure it is reasonable to convert this block. What matters
2990 is the number of assignments currently made in only one of the
2991 branches, since if we convert we are going to always execute
2992 them. */
2993 if (c > MAX_CONDITIONAL_EXECUTE)
2994 goto done;
2996 /* Try to emit the conditional moves. First do the then block,
2997 then do anything left in the else block. */
2998 start_sequence ();
2999 if (!cond_move_convert_if_block (if_info, then_bb, cond,
3000 &then_vals, &else_vals, false)
3001 || (else_bb
3002 && !cond_move_convert_if_block (if_info, else_bb, cond,
3003 &then_vals, &else_vals, true)))
3005 end_sequence ();
3006 goto done;
3008 seq = end_ifcvt_sequence (if_info);
3009 if (!seq)
3010 goto done;
3012 loc_insn = first_active_insn (then_bb);
3013 if (!loc_insn)
3015 loc_insn = first_active_insn (else_bb);
3016 gcc_assert (loc_insn);
3018 emit_insn_before_setloc (seq, jump, INSN_LOCATION (loc_insn));
3020 if (else_bb)
3022 delete_basic_block (else_bb);
3023 num_true_changes++;
3025 else
3026 remove_edge (find_edge (test_bb, join_bb));
3028 remove_edge (find_edge (then_bb, join_bb));
3029 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
3030 delete_basic_block (then_bb);
3031 num_true_changes++;
3033 if (can_merge_blocks_p (test_bb, join_bb))
3035 merge_blocks (test_bb, join_bb);
3036 num_true_changes++;
3039 num_updated_if_blocks++;
3041 success_p = TRUE;
3043 done:
3044 then_regs.release ();
3045 else_regs.release ();
3046 return success_p;
3050 /* Determine if a given basic block heads a simple IF-THEN-JOIN or an
3051 IF-THEN-ELSE-JOIN block.
3053 If so, we'll try to convert the insns to not require the branch,
3054 using only transformations that do not require conditional execution.
3056 Return TRUE if we were successful at converting the block. */
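/* The block shapes recognized here, sketched with TEST on top:

       IF-THEN-ELSE-JOIN        IF-THEN-JOIN
            test                    test
           /    \                  /    \
        then    else            then     |
           \    /                  \     |
            join                    join

   plus the mirrored IF-ELSE-JOIN form, which is handled by swapping the
   THEN and ELSE roles and reversing the jump condition.  */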
3058 static int
3059 noce_find_if_block (basic_block test_bb, edge then_edge, edge else_edge,
3060 int pass)
3062 basic_block then_bb, else_bb, join_bb;
3063 bool then_else_reversed = false;
3064 rtx_insn *jump;
3065 rtx cond;
3066 rtx_insn *cond_earliest;
3067 struct noce_if_info if_info;
3069 /* We should only ever get here before reload. */
3070 gcc_assert (!reload_completed);
3072 /* Recognize an IF-THEN-ELSE-JOIN block. */
3073 if (single_pred_p (then_edge->dest)
3074 && single_succ_p (then_edge->dest)
3075 && single_pred_p (else_edge->dest)
3076 && single_succ_p (else_edge->dest)
3077 && single_succ (then_edge->dest) == single_succ (else_edge->dest))
3079 then_bb = then_edge->dest;
3080 else_bb = else_edge->dest;
3081 join_bb = single_succ (then_bb);
3083 /* Recognize an IF-THEN-JOIN block. */
3084 else if (single_pred_p (then_edge->dest)
3085 && single_succ_p (then_edge->dest)
3086 && single_succ (then_edge->dest) == else_edge->dest)
3088 then_bb = then_edge->dest;
3089 else_bb = NULL_BLOCK;
3090 join_bb = else_edge->dest;
3092 /* Recognize an IF-ELSE-JOIN block. We can have those because the order
3093 of basic blocks in cfglayout mode does not matter, so the fallthrough
3094 edge can go to any basic block (and not just to bb->next_bb, like in
3095 cfgrtl mode). */
3096 else if (single_pred_p (else_edge->dest)
3097 && single_succ_p (else_edge->dest)
3098 && single_succ (else_edge->dest) == then_edge->dest)
3100 /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
3101 To make this work, we have to invert the THEN and ELSE blocks
3102 and reverse the jump condition. */
3103 then_bb = else_edge->dest;
3104 else_bb = NULL_BLOCK;
3105 join_bb = single_succ (then_bb);
3106 then_else_reversed = true;
3108 else
3109 /* Not a form we can handle. */
3110 return FALSE;
3112 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
3113 if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
3114 return FALSE;
3115 if (else_bb
3116 && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
3117 return FALSE;
3119 num_possible_if_blocks++;
3121 if (dump_file)
3123 fprintf (dump_file,
3124 "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
3125 (else_bb) ? "-ELSE" : "",
3126 pass, test_bb->index, then_bb->index);
3128 if (else_bb)
3129 fprintf (dump_file, ", else %d", else_bb->index);
3131 fprintf (dump_file, ", join %d\n", join_bb->index);
3134 /* If the conditional jump is more than just a conditional
3135 jump, then we cannot do if-conversion on this block. */
3136 jump = BB_END (test_bb);
3137 if (! onlyjump_p (jump))
3138 return FALSE;
3140 /* If this is not a standard conditional jump, we can't parse it. */
3141 cond = noce_get_condition (jump, &cond_earliest, then_else_reversed);
3142 if (!cond)
3143 return FALSE;
3145 /* We must be comparing objects whose modes imply the size. */
3146 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
3147 return FALSE;
3149 /* Initialize an IF_INFO struct to pass around. */
3150 memset (&if_info, 0, sizeof if_info);
3151 if_info.test_bb = test_bb;
3152 if_info.then_bb = then_bb;
3153 if_info.else_bb = else_bb;
3154 if_info.join_bb = join_bb;
3155 if_info.cond = cond;
3156 if_info.cond_earliest = cond_earliest;
3157 if_info.jump = jump;
3158 if_info.then_else_reversed = then_else_reversed;
3159 if_info.branch_cost = BRANCH_COST (optimize_bb_for_speed_p (test_bb),
3160 predictable_edge_p (then_edge));
3162 /* Do the real work. */
3164 if (noce_process_if_block (&if_info))
3165 return TRUE;
3167 if (HAVE_conditional_move
3168 && cond_move_process_if_block (&if_info))
3169 return TRUE;
3171 return FALSE;
3175 /* Merge the blocks and mark for local life update. */
3177 static void
3178 merge_if_block (struct ce_if_block * ce_info)
3180 basic_block test_bb = ce_info->test_bb; /* last test block */
3181 basic_block then_bb = ce_info->then_bb; /* THEN */
3182 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
3183 basic_block join_bb = ce_info->join_bb; /* join block */
3184 basic_block combo_bb;
3186 /* All block merging is done into the lower block numbers. */
3188 combo_bb = test_bb;
3189 df_set_bb_dirty (test_bb);
3191 /* Merge any basic blocks to handle && and || subtests. Each of
3192 the blocks are on the fallthru path from the predecessor block. */
3193 if (ce_info->num_multiple_test_blocks > 0)
3195 basic_block bb = test_bb;
3196 basic_block last_test_bb = ce_info->last_test_bb;
3197 basic_block fallthru = block_fallthru (bb);
3201 bb = fallthru;
3202 fallthru = block_fallthru (bb);
3203 merge_blocks (combo_bb, bb);
3204 num_true_changes++;
3206 while (bb != last_test_bb);
3209 /* Merge TEST block into THEN block. Normally the THEN block won't have a
3210 label, but it might if there were || tests. That label's count should be
3211 zero, and it normally should be removed. */
3213 if (then_bb)
3215 /* If THEN_BB has no successors, then there's a BARRIER after it.
3216 If COMBO_BB has more than one successor (THEN_BB), then that BARRIER
3217 is no longer needed, and in fact it is incorrect to leave it in
3218 the insn stream. */
3219 if (EDGE_COUNT (then_bb->succs) == 0
3220 && EDGE_COUNT (combo_bb->succs) > 1)
3222 rtx_insn *end = NEXT_INSN (BB_END (then_bb));
3223 while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
3224 end = NEXT_INSN (end);
3226 if (end && BARRIER_P (end))
3227 delete_insn (end);
3229 merge_blocks (combo_bb, then_bb);
3230 num_true_changes++;
3233 /* The ELSE block, if it existed, had a label. That label count
3234 will almost always be zero, but odd things can happen when labels
3235 get their addresses taken. */
3236 if (else_bb)
3238 /* If ELSE_BB has no successors, then there's a BARRIER after it.
3239 If COMBO_BB has more than one successor (ELSE_BB), then that BARRIER
3240 is no longer needed, and in fact it is incorrect to leave it in
3241 the insn stream. */
3242 if (EDGE_COUNT (else_bb->succs) == 0
3243 && EDGE_COUNT (combo_bb->succs) > 1)
3245 rtx_insn *end = NEXT_INSN (BB_END (else_bb));
3246 while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
3247 end = NEXT_INSN (end);
3249 if (end && BARRIER_P (end))
3250 delete_insn (end);
3252 merge_blocks (combo_bb, else_bb);
3253 num_true_changes++;
3256 /* If there was no join block reported, that means it was not adjacent
3257 to the others, and so we cannot merge them. */
3259 if (! join_bb)
3261 rtx_insn *last = BB_END (combo_bb);
3263 /* The outgoing edge for the current COMBO block should already
3264 be correct. Verify this. */
3265 if (EDGE_COUNT (combo_bb->succs) == 0)
3266 gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
3267 || (NONJUMP_INSN_P (last)
3268 && GET_CODE (PATTERN (last)) == TRAP_IF
3269 && (TRAP_CONDITION (PATTERN (last))
3270 == const_true_rtx)));
3272 else
3273 /* There should still be something at the end of the THEN or ELSE
3274 blocks taking us to our final destination. */
3275 gcc_assert (JUMP_P (last)
3276 || (EDGE_SUCC (combo_bb, 0)->dest
3277 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3278 && CALL_P (last)
3279 && SIBLING_CALL_P (last))
3280 || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
3281 && can_throw_internal (last)));
3284 /* The JOIN block may have had quite a number of other predecessors too.
3285 Since we've already merged the TEST, THEN and ELSE blocks, we should
3286 have only one remaining edge from our if-then-else diamond. If there
3287 is more than one remaining edge, it must come from elsewhere. There
3288 may be zero incoming edges if the THEN block didn't actually join
3289 back up (as with a call to a non-return function). */
3290 else if (EDGE_COUNT (join_bb->preds) < 2
3291 && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3293 /* We can merge the JOIN cleanly, update the dataflow, and try
3294 again on this pass. */
3295 merge_blocks (combo_bb, join_bb);
3296 num_true_changes++;
3298 else
3300 /* We cannot merge the JOIN. */
3302 /* The outgoing edge for the current COMBO block should already
3303 be correct. Verify this. */
3304 gcc_assert (single_succ_p (combo_bb)
3305 && single_succ (combo_bb) == join_bb);
3307 /* Remove the jump and cruft from the end of the COMBO block. */
3308 if (join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3309 tidy_fallthru_edge (single_succ_edge (combo_bb));
3312 num_updated_if_blocks++;
3315 /* Find a block ending in a simple IF condition and try to transform it
3316 in some way. When converting a multi-block condition, put the new code
3317 in the first such block and delete the rest. Return a pointer to this
3318 first block if some transformation was done. Return NULL otherwise. */
3320 static basic_block
3321 find_if_header (basic_block test_bb, int pass)
3323 ce_if_block ce_info;
3324 edge then_edge;
3325 edge else_edge;
3327 /* The kind of block we're looking for has exactly two successors. */
3328 if (EDGE_COUNT (test_bb->succs) != 2)
3329 return NULL;
3331 then_edge = EDGE_SUCC (test_bb, 0);
3332 else_edge = EDGE_SUCC (test_bb, 1);
3334 if (df_get_bb_dirty (then_edge->dest))
3335 return NULL;
3336 if (df_get_bb_dirty (else_edge->dest))
3337 return NULL;
3339 /* Neither edge should be abnormal. */
3340 if ((then_edge->flags & EDGE_COMPLEX)
3341 || (else_edge->flags & EDGE_COMPLEX))
3342 return NULL;
3344 /* Nor exit the loop. */
3345 if ((then_edge->flags & EDGE_LOOP_EXIT)
3346 || (else_edge->flags & EDGE_LOOP_EXIT))
3347 return NULL;
3349 /* The THEN edge is canonically the one that falls through. */
3350 if (then_edge->flags & EDGE_FALLTHRU)
3352 else if (else_edge->flags & EDGE_FALLTHRU)
3354 edge e = else_edge;
3355 else_edge = then_edge;
3356 then_edge = e;
3358 else
3359 /* Otherwise this must be a multiway branch of some sort. */
3360 return NULL;
3362 memset (&ce_info, 0, sizeof (ce_info));
3363 ce_info.test_bb = test_bb;
3364 ce_info.then_bb = then_edge->dest;
3365 ce_info.else_bb = else_edge->dest;
3366 ce_info.pass = pass;
3368 #ifdef IFCVT_MACHDEP_INIT
3369 IFCVT_MACHDEP_INIT (&ce_info);
3370 #endif
3372 if (!reload_completed
3373 && noce_find_if_block (test_bb, then_edge, else_edge, pass))
3374 goto success;
3376 if (reload_completed
3377 && targetm.have_conditional_execution ()
3378 && cond_exec_find_if_block (&ce_info))
3379 goto success;
3381 if (HAVE_trap
3382 && optab_handler (ctrap_optab, word_mode) != CODE_FOR_nothing
3383 && find_cond_trap (test_bb, then_edge, else_edge))
3384 goto success;
3386 if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
3387 && (reload_completed || !targetm.have_conditional_execution ()))
3389 if (find_if_case_1 (test_bb, then_edge, else_edge))
3390 goto success;
3391 if (find_if_case_2 (test_bb, then_edge, else_edge))
3392 goto success;
3395 return NULL;
3397 success:
3398 if (dump_file)
3399 fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
3400 /* Set this so we continue looking. */
3401 cond_exec_changed_p = TRUE;
3402 return ce_info.test_bb;
3405 /* Check whether a block has two edges, one of which falls through to the next
3406 block and the other of which jumps to TARGET_BB, so that we can tell whether
3407 the block is part of an && test or an || test. Return -1 if it does not,
3408 otherwise the number of non-note, non-jump, non-USE/CLOBBER insns in the block. */
3410 static int
3411 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
3413 edge cur_edge;
3414 int fallthru_p = FALSE;
3415 int jump_p = FALSE;
3416 rtx_insn *insn;
3417 rtx_insn *end;
3418 int n_insns = 0;
3419 edge_iterator ei;
3421 if (!cur_bb || !target_bb)
3422 return -1;
3424 /* If no edges, obviously it doesn't jump or fallthru. */
3425 if (EDGE_COUNT (cur_bb->succs) == 0)
3426 return -1;
3428 FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
3430 if (cur_edge->flags & EDGE_COMPLEX)
3431 /* Anything complex isn't what we want. */
3432 return -1;
3434 else if (cur_edge->flags & EDGE_FALLTHRU)
3435 fallthru_p = TRUE;
3437 else if (cur_edge->dest == target_bb)
3438 jump_p = TRUE;
3440 else
3441 return -1;
3444 if ((jump_p & fallthru_p) == 0)
3445 return -1;
3447 /* Don't allow calls in the block, since this is used to group && and ||
3448 together for conditional execution support. ??? We should support
3449 conditional execution across calls for IA-64 some day, but
3450 for now it makes the code simpler. */
3451 end = BB_END (cur_bb);
3452 insn = BB_HEAD (cur_bb);
3454 while (insn != NULL_RTX)
3456 if (CALL_P (insn))
3457 return -1;
3459 if (INSN_P (insn)
3460 && !JUMP_P (insn)
3461 && !DEBUG_INSN_P (insn)
3462 && GET_CODE (PATTERN (insn)) != USE
3463 && GET_CODE (PATTERN (insn)) != CLOBBER)
3464 n_insns++;
3466 if (insn == end)
3467 break;
3469 insn = NEXT_INSN (insn);
3472 return n_insns;
3475 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
3476 block. If so, we'll try to convert the insns to not require the branch.
3477 Return TRUE if we were successful at converting the block. */
3479 static int
3480 cond_exec_find_if_block (struct ce_if_block * ce_info)
3482 basic_block test_bb = ce_info->test_bb;
3483 basic_block then_bb = ce_info->then_bb;
3484 basic_block else_bb = ce_info->else_bb;
3485 basic_block join_bb = NULL_BLOCK;
3486 edge cur_edge;
3487 basic_block next;
3488 edge_iterator ei;
3490 ce_info->last_test_bb = test_bb;
3492 /* We should only ever get here after reload,
3493 and if we have conditional execution. */
3494 gcc_assert (reload_completed && targetm.have_conditional_execution ());
3496 /* Discover if any fall through predecessors of the current test basic block
3497 were && tests (which jump to the else block) or || tests (which jump to
3498 the then block). */
3499 if (single_pred_p (test_bb)
3500 && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
3502 basic_block bb = single_pred (test_bb);
3503 basic_block target_bb;
3504 int max_insns = MAX_CONDITIONAL_EXECUTE;
3505 int n_insns;
3507 /* Determine if the preceding block is an && or || block. */
3508 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
3510 ce_info->and_and_p = TRUE;
3511 target_bb = else_bb;
3513 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
3515 ce_info->and_and_p = FALSE;
3516 target_bb = then_bb;
3518 else
3519 target_bb = NULL_BLOCK;
3521 if (target_bb && n_insns <= max_insns)
3523 int total_insns = 0;
3524 int blocks = 0;
3526 ce_info->last_test_bb = test_bb;
3528 /* Found at least one && or || block, look for more. */
3531 ce_info->test_bb = test_bb = bb;
3532 total_insns += n_insns;
3533 blocks++;
3535 if (!single_pred_p (bb))
3536 break;
3538 bb = single_pred (bb);
3539 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
3541 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
3543 ce_info->num_multiple_test_blocks = blocks;
3544 ce_info->num_multiple_test_insns = total_insns;
3546 if (ce_info->and_and_p)
3547 ce_info->num_and_and_blocks = blocks;
3548 else
3549 ce_info->num_or_or_blocks = blocks;
3553 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
3554 other than any || blocks which jump to the THEN block. */
3555 if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
3556 return FALSE;
3558 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
3559 FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
3561 if (cur_edge->flags & EDGE_COMPLEX)
3562 return FALSE;
3565 FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
3567 if (cur_edge->flags & EDGE_COMPLEX)
3568 return FALSE;
3571 /* The THEN block of an IF-THEN combo must have zero or one successors. */
3572 if (EDGE_COUNT (then_bb->succs) > 0
3573 && (!single_succ_p (then_bb)
3574 || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
3575 || (epilogue_completed
3576 && tablejump_p (BB_END (then_bb), NULL, NULL))))
3577 return FALSE;
3579 /* If the THEN block has no successors, conditional execution can still
3580 make a conditional call. Don't do this unless the ELSE block has
3581 only one incoming edge -- the CFG manipulation is too ugly otherwise.
3582 Check for the last insn of the THEN block being an indirect jump, which
3583 is listed as not having any successors, but confuses the rest of the CE
3584 code processing. ??? We should fix this in the future. */
3585 if (EDGE_COUNT (then_bb->succs) == 0)
3587 if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3589 rtx_insn *last_insn = BB_END (then_bb);
3591 while (last_insn
3592 && NOTE_P (last_insn)
3593 && last_insn != BB_HEAD (then_bb))
3594 last_insn = PREV_INSN (last_insn);
3596 if (last_insn
3597 && JUMP_P (last_insn)
3598 && ! simplejump_p (last_insn))
3599 return FALSE;
3601 join_bb = else_bb;
3602 else_bb = NULL_BLOCK;
3604 else
3605 return FALSE;
3608 /* If the THEN block's successor is the other edge out of the TEST block,
3609 then we have an IF-THEN combo without an ELSE. */
3610 else if (single_succ (then_bb) == else_bb)
3612 join_bb = else_bb;
3613 else_bb = NULL_BLOCK;
3616 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
3617 has exactly one predecessor and one successor, and the outgoing edge
3618 is not complex, then we have an IF-THEN-ELSE combo. */
3619 else if (single_succ_p (else_bb)
3620 && single_succ (then_bb) == single_succ (else_bb)
3621 && single_pred_p (else_bb)
3622 && !(single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
3623 && !(epilogue_completed
3624 && tablejump_p (BB_END (else_bb), NULL, NULL)))
3625 join_bb = single_succ (else_bb);
3627 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
3628 else
3629 return FALSE;
3631 num_possible_if_blocks++;
3633 if (dump_file)
3635 fprintf (dump_file,
3636 "\nIF-THEN%s block found, pass %d, start block %d "
3637 "[insn %d], then %d [%d]",
3638 (else_bb) ? "-ELSE" : "",
3639 ce_info->pass,
3640 test_bb->index,
3641 BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
3642 then_bb->index,
3643 BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
3645 if (else_bb)
3646 fprintf (dump_file, ", else %d [%d]",
3647 else_bb->index,
3648 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
3650 fprintf (dump_file, ", join %d [%d]",
3651 join_bb->index,
3652 BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
3654 if (ce_info->num_multiple_test_blocks > 0)
3655 fprintf (dump_file, ", %d %s block%s last test %d [%d]",
3656 ce_info->num_multiple_test_blocks,
3657 (ce_info->and_and_p) ? "&&" : "||",
3658 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
3659 ce_info->last_test_bb->index,
3660 ((BB_HEAD (ce_info->last_test_bb))
3661 ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
3662 : -1));
3664 fputc ('\n', dump_file);
3667 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we get the
3668 first condition for free, since we've already asserted that there's a
3669 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
3670 we checked the FALLTHRU flag, those are already adjacent to the last IF
3671 block. */
3672 /* ??? As an enhancement, move the ELSE block. Have to deal with
3673 BLOCK notes, if by no other means than backing out the merge if they
3674 exist. Sticky enough I don't want to think about it now. */
3675 next = then_bb;
3676 if (else_bb && (next = next->next_bb) != else_bb)
3677 return FALSE;
3678 if ((next = next->next_bb) != join_bb
3679 && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3681 if (else_bb)
3682 join_bb = NULL;
3683 else
3684 return FALSE;
3687 /* Do the real work. */
3689 ce_info->else_bb = else_bb;
3690 ce_info->join_bb = join_bb;
3692 /* If we have && and || tests, first try to combine the && and ||
3693 tests into the conditional code, and if that fails, go back and handle
3694 it without the && and ||, which at present handles the && case if there
3695 was no ELSE block. */
3696 if (cond_exec_process_if_block (ce_info, TRUE))
3697 return TRUE;
3699 if (ce_info->num_multiple_test_blocks)
3701 cancel_changes (0);
3703 if (cond_exec_process_if_block (ce_info, FALSE))
3704 return TRUE;
3707 return FALSE;
3710 /* Convert a branch over a trap, or a branch
3711 to a trap, into a conditional trap. */
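/* E.g. a test block ending in "if (x == 0) goto trap_bb;", where
   trap_bb contains nothing but an unconditional trap, becomes a single
   conditional trap insn "trap_if (x == 0)" emitted before the
   comparison, after which the trap block can be deleted.  */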
3713 static int
3714 find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
3716 basic_block then_bb = then_edge->dest;
3717 basic_block else_bb = else_edge->dest;
3718 basic_block other_bb, trap_bb;
3719 rtx_insn *trap, *jump;
3720 rtx cond, seq;
3721 rtx_insn *cond_earliest;
3722 enum rtx_code code;
3724 /* Locate the block with the trap instruction. */
3725 /* ??? While we look for no successors, we really ought to allow
3726 EH successors. Need to fix merge_if_block for that to work. */
3727 if ((trap = block_has_only_trap (then_bb)) != NULL)
3728 trap_bb = then_bb, other_bb = else_bb;
3729 else if ((trap = block_has_only_trap (else_bb)) != NULL)
3730 trap_bb = else_bb, other_bb = then_bb;
3731 else
3732 return FALSE;
3734 if (dump_file)
3736 fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
3737 test_bb->index, trap_bb->index);
3740 /* If this is not a standard conditional jump, we can't parse it. */
3741 jump = BB_END (test_bb);
3742 cond = noce_get_condition (jump, &cond_earliest, false);
3743 if (! cond)
3744 return FALSE;
3746 /* If the conditional jump is more than just a conditional jump, then
3747 we cannot do if-conversion on this block. */
3748 if (! onlyjump_p (jump))
3749 return FALSE;
3751 /* We must be comparing objects whose modes imply the size. */
3752 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
3753 return FALSE;
3755 /* Reverse the comparison code, if necessary. */
3756 code = GET_CODE (cond);
3757 if (then_bb == trap_bb)
3759 code = reversed_comparison_code (cond, jump);
3760 if (code == UNKNOWN)
3761 return FALSE;
3764 /* Attempt to generate the conditional trap. */
3765 seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
3766 copy_rtx (XEXP (cond, 1)),
3767 TRAP_CODE (PATTERN (trap)));
3768 if (seq == NULL)
3769 return FALSE;
3771 /* Emit the new insns before cond_earliest. */
3772 emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATION (trap));
3774 /* Delete the trap block if possible. */
3775 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
3776 df_set_bb_dirty (test_bb);
3777 df_set_bb_dirty (then_bb);
3778 df_set_bb_dirty (else_bb);
3780 if (EDGE_COUNT (trap_bb->preds) == 0)
3782 delete_basic_block (trap_bb);
3783 num_true_changes++;
3786 /* Wire together the blocks again. */
3787 if (current_ir_type () == IR_RTL_CFGLAYOUT)
3788 single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
3789 else if (trap_bb == then_bb)
3791 rtx lab;
3792 rtx_insn *newjump;
3794 lab = JUMP_LABEL (jump);
3795 newjump = emit_jump_insn_after (gen_jump (lab), jump);
3796 LABEL_NUSES (lab) += 1;
3797 JUMP_LABEL (newjump) = lab;
3798 emit_barrier_after (newjump);
3800 delete_insn (jump);
3802 if (can_merge_blocks_p (test_bb, other_bb))
3804 merge_blocks (test_bb, other_bb);
3805 num_true_changes++;
3808 num_updated_if_blocks++;
3809 return TRUE;
3812 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
3813 return it. */
3815 static rtx_insn *
3816 block_has_only_trap (basic_block bb)
3818 rtx_insn *trap;
3820 /* We're not the exit block. */
3821 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
3822 return NULL;
3824 /* The block must have no successors. */
3825 if (EDGE_COUNT (bb->succs) > 0)
3826 return NULL;
3828 /* The only instruction in the block must be the trap. */
3829 trap = first_active_insn (bb);
3830 if (! (trap == BB_END (bb)
3831 && GET_CODE (PATTERN (trap)) == TRAP_IF
3832 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
3833 return NULL;
3835 return trap;
3838 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
3839 transformable, but not necessarily the other. There need be no
3840 JOIN block.
3842 Return TRUE if we were successful at converting the block.
3844 Cases we'd like to look at:
3847 if (test) goto over; // x not live
3848 x = a;
3849 goto label;
3850 over:
3852 becomes
3854 x = a;
3855 if (! test) goto label;
3858 if (test) goto E; // x not live
3859 x = big();
3860 goto L;
3862 x = b;
3863 goto M;
3865 becomes
3867 x = b;
3868 if (test) goto M;
3869 x = big();
3870 goto L;
3872 (3) // This one's really only interesting for targets that can do
3873 // multiway branching, e.g. IA-64 BBB bundles. For other targets
3874 // it results in multiple branches on a cache line, which often
3875 // does not sit well with predictors.
3877 if (test1) goto E; // predicted not taken
3878 x = a;
3879 if (test2) goto F;
3882 x = b;
3885 becomes
3887 x = a;
3888 if (test1) goto E;
3889 if (test2) goto F;
3891 Notes:
3893 (A) Don't do (2) if the branch is predicted against the block we're
3894 eliminating. Do it anyway if we can eliminate a branch; this requires
3895 that the sole successor of the eliminated block postdominate the other
3896 side of the if.
3898 (B) With CE, on (3) we can steal from both sides of the if, creating
3900 if (test1) x = a;
3901 if (!test1) x = b;
3902 if (test1) goto J;
3903 if (test2) goto F;
3907 Again, this is most useful if J postdominates.
3909 (C) CE substitutes for helpful life information.
3911 (D) These heuristics need a lot of work. */
3913 /* Tests for case 1 above. */
3915 static int
3916 find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
3918 basic_block then_bb = then_edge->dest;
3919 basic_block else_bb = else_edge->dest;
3920 basic_block new_bb;
3921 int then_bb_index, then_prob;
3922 rtx else_target = NULL_RTX;
3924 /* If we are partitioning hot/cold basic blocks, we don't want to
3925 mess up unconditional or indirect jumps that cross between hot
3926 and cold sections.
3928 Basic block partitioning may result in some jumps that appear to
3929 be optimizable (or blocks that appear to be mergeable), but which really
3930 must be left untouched (they are required to make it safely across
3931 partition boundaries). See the comments at the top of
3932 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
3934 if ((BB_END (then_bb)
3935 && JUMP_P (BB_END (then_bb))
3936 && CROSSING_JUMP_P (BB_END (then_bb)))
3937 || (BB_END (test_bb)
3938 && JUMP_P (BB_END (test_bb))
3939 && CROSSING_JUMP_P (BB_END (test_bb)))
3940 || (BB_END (else_bb)
3941 && JUMP_P (BB_END (else_bb))
3942 && CROSSING_JUMP_P (BB_END (else_bb))))
3943 return FALSE;
3945 /* THEN has one successor. */
3946 if (!single_succ_p (then_bb))
3947 return FALSE;
3949 /* THEN does not fall through, but is not strange either. */
3950 if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
3951 return FALSE;
3953 /* THEN has one predecessor. */
3954 if (!single_pred_p (then_bb))
3955 return FALSE;
3957 /* THEN must do something. */
3958 if (forwarder_block_p (then_bb))
3959 return FALSE;
3961 num_possible_if_blocks++;
3962 if (dump_file)
3963 fprintf (dump_file,
3964 "\nIF-CASE-1 found, start %d, then %d\n",
3965 test_bb->index, then_bb->index);
3967 if (then_edge->probability)
3968 then_prob = REG_BR_PROB_BASE - then_edge->probability;
3969 else
3970 then_prob = REG_BR_PROB_BASE / 2;
3972 /* We're speculating from the THEN path, so we want to make sure the cost
3973 of speculation is within reason. */
3974 if (! cheap_bb_rtx_cost_p (then_bb, then_prob,
3975 COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
3976 predictable_edge_p (then_edge)))))
3977 return FALSE;
3979 if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
3981 rtx_insn *jump = BB_END (else_edge->src);
3982 gcc_assert (JUMP_P (jump));
3983 else_target = JUMP_LABEL (jump);
3986 /* Registers set are dead, or are predicable. */
3987 if (! dead_or_predicable (test_bb, then_bb, else_bb,
3988 single_succ_edge (then_bb), 1))
3989 return FALSE;
3991 /* Conversion went ok, including moving the insns and fixing up the
3992 jump. Adjust the CFG to match. */
3994 /* We can avoid creating a new basic block if then_bb is immediately
3995 followed by else_bb, i.e. deleting then_bb allows test_bb to fall
3996 through to else_bb. */
3998 if (then_bb->next_bb == else_bb
3999 && then_bb->prev_bb == test_bb
4000 && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4002 redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
4003 new_bb = 0;
4005 else if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4006 new_bb = force_nonfallthru_and_redirect (FALLTHRU_EDGE (test_bb),
4007 else_bb, else_target);
4008 else
4009 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
4010 else_bb);
4012 df_set_bb_dirty (test_bb);
4013 df_set_bb_dirty (else_bb);
4015 then_bb_index = then_bb->index;
4016 delete_basic_block (then_bb);
4018 /* Make rest of code believe that the newly created block is the THEN_BB
4019 block we removed. */
4020 if (new_bb)
4022 df_bb_replace (then_bb_index, new_bb);
4023 /* This should have been done above via force_nonfallthru_and_redirect
4024 (possibly called from redirect_edge_and_branch_force). */
4025 gcc_checking_assert (BB_PARTITION (new_bb) == BB_PARTITION (test_bb));
4028 num_true_changes++;
4029 num_updated_if_blocks++;
4031 return TRUE;
/* Test for case 2 above.  */
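
/* An illustrative sketch (not from the original commentary, inferred from
   the code below): the common shape

	test_bb:  if (cond) goto L;
	else_bb:  x = b;
     L:

   becomes, when ELSE is cheap to speculate and X is dead or predicable,

	test_bb:  x = b;
     L:

   with ELSE_BB deleted, and the conditional jump removed outright when
   its target is the join point.  */
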
static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ;
  int then_prob, else_prob;

  /* We do not want to speculate (empty) loop latches.  */
  if (current_loops
      && else_bb->loop_father->latch == else_bb)
    return FALSE;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && JUMP_P (BB_END (then_bb))
       && CROSSING_JUMP_P (BB_END (then_bb)))
      || (BB_END (test_bb)
	  && JUMP_P (BB_END (test_bb))
	  && CROSSING_JUMP_P (BB_END (test_bb)))
      || (BB_END (else_bb)
	  && JUMP_P (BB_END (else_bb))
	  && CROSSING_JUMP_P (BB_END (else_bb))))
    return FALSE;

  /* ELSE has one successor.  */
  if (!single_succ_p (else_bb))
    return FALSE;
  else
    else_succ = single_succ_edge (else_bb);

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (!single_pred_p (else_bb))
    return FALSE;

  /* THEN is not EXIT.  */
  if (then_bb->index < NUM_FIXED_BLOCKS)
    return FALSE;

  if (else_edge->probability)
    {
      else_prob = else_edge->probability;
      then_prob = REG_BR_PROB_BASE - else_prob;
    }
  else
    {
      else_prob = REG_BR_PROB_BASE / 2;
      then_prob = REG_BR_PROB_BASE / 2;
    }

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
  if (else_prob > then_prob)
    ;
  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
			      else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-2 found, start %d, else %d\n",
	     test_bb->index, else_bb->index);

  /* We're speculating from the ELSE path, so we want to make sure the
     cost of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (else_bb, else_prob,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
				    predictable_edge_p (else_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}

/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  DEST_EDGE is an
   edge representing a jump to the join block; after the conversion,
   TEST_BB should be branching to its destination.
   REVERSEP is true if the sense of the branch should be reversed.  */
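
/* For orientation (a note grounded in the two call sites above):
   case 1 calls

	dead_or_predicable (test_bb, then_bb, else_bb,
			    single_succ_edge (then_bb), 1)

   and case 2 calls

	dead_or_predicable (test_bb, else_bb, then_bb, else_succ, 0)

   so MERGE_BB is always the block being speculated away, and the branch
   sense is reversed only when merging the THEN side.  */
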
static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
		    basic_block other_bb, edge dest_edge, int reversep)
{
  basic_block new_dest = dest_edge->dest;
  rtx_insn *head, *end, *jump;
  rtx_insn *earliest = NULL;
  rtx old_dest;
  bitmap merge_set = NULL;
  /* Number of pending changes.  */
  int n_validated_changes = 0;
  rtx new_dest_label = NULL_RTX;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  while (DEBUG_INSN_P (end) && end != head)
    end = PREV_INSN (end);

  /* If merge_bb ends with a tablejump, predicating/moving insns
     into test_bb and then deleting merge_bb will result in the jumptable
     that follows merge_bb being removed along with merge_bb and then we
     get an unresolved reference to the jumptable.  */
  if (tablejump_p (end, NULL, NULL))
    return FALSE;

  if (LABEL_P (head))
    head = NEXT_INSN (head);
  while (DEBUG_INSN_P (head) && head != end)
    head = NEXT_INSN (head);
  if (NOTE_P (head))
    {
      if (head == end)
	{
	  head = end = NULL;
	  goto no_body;
	}
      head = NEXT_INSN (head);
      while (DEBUG_INSN_P (head) && head != end)
	head = NEXT_INSN (head);
    }

  if (JUMP_P (end))
    {
      if (!onlyjump_p (end))
	return FALSE;
      if (head == end)
	{
	  head = end = NULL;
	  goto no_body;
	}
      end = PREV_INSN (end);
      while (DEBUG_INSN_P (end) && end != head)
	end = PREV_INSN (end);
    }

  /* Don't move frame-related insns across the conditional branch.  This
     can lead to one of the paths of the branch having wrong unwind info.  */
  if (epilogue_completed)
    {
      rtx_insn *insn = head;
      while (1)
	{
	  if (INSN_P (insn) && RTX_FRAME_RELATED_P (insn))
	    return FALSE;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (targetm.have_conditional_execution ())
    {
      /* In the conditional execution case, we have things easy.  We know
	 the condition is reversible.  We don't have to check life info
	 because we're going to conditionally execute the code anyway.
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

      rtx cond;

      cond = cond_exec_get_condition (jump);
      if (! cond)
	return FALSE;

      rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      int prob_val = (note ? XINT (note, 0) : -1);

      if (reversep)
	{
	  enum rtx_code rev = reversed_comparison_code (cond, jump);
	  if (rev == UNKNOWN)
	    return FALSE;
	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
				 XEXP (cond, 1));
	  if (prob_val >= 0)
	    prob_val = REG_BR_PROB_BASE - prob_val;
	}
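
      /* Illustrative arithmetic for the inversion above: with
	 REG_BR_PROB_BASE == 10000, a REG_BR_PROB note of 9000 becomes
	 1000 once the branch sense is reversed.  */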

      if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
	  && verify_changes (0))
	n_validated_changes = num_validated_changes ();
      else
	cancel_changes (0);

      earliest = jump;
    }
#endif

  /* If we allocated new pseudos (e.g. in the conditional move
     expander called from noce_emit_cmove), we must resize the
     array first.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Try the NCE path if the CE path did not result in any changes.  */
  if (n_validated_changes == 0)
    {
      rtx cond;
      rtx_insn *insn;
      regset live;
      bool success;

      /* In the non-conditional execution case, we have to verify that there
	 are no trapping operations, no calls, no references to memory, and
	 that any registers modified are dead at the branch site.  */

      if (!any_condjump_p (jump))
	return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest, false);
      if (!cond)
	return FALSE;

      live = BITMAP_ALLOC (&reg_obstack);
      simulate_backwards_to_point (merge_bb, live, end);
      success = can_move_insns_across (head, end, earliest, jump,
				       merge_bb, live,
				       df_get_live_in (other_bb), NULL);
      BITMAP_FREE (live);
      if (!success)
	return FALSE;

      /* Collect the set of registers set in MERGE_BB.  */
      merge_set = BITMAP_ALLOC (&reg_obstack);

      FOR_BB_INSNS (merge_bb, insn)
	if (NONDEBUG_INSN_P (insn))
	  df_simulate_find_defs (insn, merge_set);

      /* If shrink-wrapping, disable this optimization when test_bb is
	 the first basic block and merge_bb exits.  The idea is to not
	 move code setting up a return register as that may clobber a
	 register used to pass function parameters, which then must be
	 saved in caller-saved regs.  A caller-saved reg requires the
	 prologue, killing a shrink-wrap opportunity.  */
      if ((SHRINK_WRAPPING_ENABLED && !epilogue_completed)
	  && ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == test_bb
	  && single_succ_p (new_dest)
	  && single_succ (new_dest) == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && bitmap_intersect_p (df_get_live_in (new_dest), merge_set))
	{
	  regset return_regs;
	  unsigned int i;

	  return_regs = BITMAP_ALLOC (&reg_obstack);

	  /* Start off with the intersection of regs used to pass
	     params and regs used to return values.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (FUNCTION_ARG_REGNO_P (i)
		&& targetm.calls.function_value_regno_p (i))
	      bitmap_set_bit (return_regs, INCOMING_REGNO (i));

	  bitmap_and_into (return_regs,
			   df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  bitmap_and_into (return_regs,
			   df_get_live_in (EXIT_BLOCK_PTR_FOR_FN (cfun)));
	  if (!bitmap_empty_p (return_regs))
	    {
	      FOR_BB_INSNS_REVERSE (new_dest, insn)
		if (NONDEBUG_INSN_P (insn))
		  {
		    df_ref def;

		    /* If this insn sets any reg in return_regs, add all
		       reg uses to the set of regs we're interested in.  */
		    FOR_EACH_INSN_DEF (def, insn)
		      if (bitmap_bit_p (return_regs, DF_REF_REGNO (def)))
			{
			  df_simulate_uses (insn, return_regs);
			  break;
			}
		  }
	      if (bitmap_intersect_p (merge_set, return_regs))
		{
		  BITMAP_FREE (return_regs);
		  BITMAP_FREE (merge_set);
		  return FALSE;
		}
	    }
	  BITMAP_FREE (return_regs);
	}
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      if (!any_condjump_p (jump))
	goto cancel;

      if (JUMP_P (BB_END (dest_edge->src)))
	new_dest_label = JUMP_LABEL (BB_END (dest_edge->src));
      else if (new_dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	new_dest_label = ret_rtx;
      else
	new_dest_label = block_label (new_dest);

      if (reversep
	  ? ! invert_jump_1 (jump, new_dest_label)
	  : ! redirect_jump_1 (jump, new_dest_label))
	goto cancel;
    }

  if (verify_changes (n_validated_changes))
    confirm_change_group ();
  else
    goto cancel;

  if (other_bb != new_dest)
    {
      redirect_jump_2 (jump, old_dest, new_dest_label, 0, reversep);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
	{
	  gcov_type count, probability;
	  count = BRANCH_EDGE (test_bb)->count;
	  BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
	  FALLTHRU_EDGE (test_bb)->count = count;
	  probability = BRANCH_EDGE (test_bb)->probability;
	  BRANCH_EDGE (test_bb)->probability
	    = FALLTHRU_EDGE (test_bb)->probability;
	  FALLTHRU_EDGE (test_bb)->probability = probability;
	  update_br_prob_note (test_bb);
	}
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      rtx_insn *insn;

      if (end == BB_END (merge_bb))
	BB_END (merge_bb) = PREV_INSN (head);

      /* PR 21767: when moving insns above a conditional branch, the REG_EQUAL
	 notes being moved might become invalid.  */
      insn = head;
      do
	{
	  rtx note;

	  if (! INSN_P (insn))
	    continue;
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (! note)
	    continue;
	  remove_note (insn, note);
	} while (insn != end && (insn = NEXT_INSN (insn)));

      /* PR46315: when moving insns above a conditional branch, the REG_EQUAL
	 notes referring to the registers being set might become invalid.  */
      if (merge_set)
	{
	  unsigned i;
	  bitmap_iterator bi;

	  EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
	    remove_reg_equal_equiv_notes_for_regno (i);

	  BITMAP_FREE (merge_set);
	}
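
      /* A note on placement (inferred, not from the original comments):
	 EARLIEST is the jump itself on the conditional execution path,
	 or the start of the condition computation found by
	 noce_get_condition on the speculation path, so the moved body
	 lands ahead of the whole condition sequence.  */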

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
	 At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  cancel_changes (0);

  if (merge_set)
    BITMAP_FREE (merge_set);

  return FALSE;
}

/* Main entry point for all if-conversion.  AFTER_COMBINE is true if we
   are running after the combine pass.  */

static void
if_convert (bool after_combine)
{
  basic_block bb;
  int pass;

  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  /* Record whether we are after the combine pass.  */
  ifcvt_after_combine = after_combine;
  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;

  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  mark_loop_exit_edges ();
  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);

  /* Compute postdominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
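
  /* For example (an illustration, not from the original comments): with
     conditional execution, the inner IF in

	if (a) { if (b) x = 1; }

     can be predicated on one pass, collapsing it into its parent block;
     a later pass may then predicate the result on A as well.  */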
  pass = 0;
  do
    {
      df_analyze ();
      /* Only need to do dce on the first pass.  */
      df_clear_flags (DF_LR_RUN_DCE);
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && pass > 1)
	fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB_FN (bb, cfun)
	{
	  basic_block new_bb;
	  while (!df_get_bb_dirty (bb)
		 && (new_bb = find_if_header (bb, pass)) != NULL)
	    bb = new_bb;
	}

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && cond_exec_changed_p)
	print_rtl_with_bb (dump_file, get_insns (), dump_flags);
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  if (dump_file)
    fflush (dump_file);

  clear_aux_for_blocks ();

  /* If we allocated new pseudos, we must resize the array for sched1.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Write the final stats.  */
  if (dump_file && num_possible_if_blocks > 0)
    {
      fprintf (dump_file,
	       "\n%d possible IF blocks searched.\n",
	       num_possible_if_blocks);
      fprintf (dump_file,
	       "%d IF blocks converted.\n",
	       num_updated_if_blocks);
      fprintf (dump_file,
	       "%d true changes made.\n\n\n",
	       num_true_changes);
    }

  if (optimize == 1)
    df_remove_problem (df_live);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

/* If-conversion and CFG cleanup.  */
static unsigned int
rest_of_handle_if_conversion (void)
{
  if (flag_if_conversion)
    {
      if (dump_file)
	{
	  dump_reg_info (dump_file);
	  dump_flow_info (dump_file, dump_flags);
	}
      cleanup_cfg (CLEANUP_EXPENSIVE);
      if_convert (false);
    }

  cleanup_cfg (0);
  return 0;
}

namespace {

const pass_data pass_data_rtl_ifcvt =
{
  RTL_PASS, /* type */
  "ce1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_ifcvt : public rtl_opt_pass
{
public:
  pass_rtl_ifcvt (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_ifcvt, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (optimize > 0) && dbg_cnt (if_conversion);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_if_conversion ();
    }

}; // class pass_rtl_ifcvt

} // anon namespace

rtl_opt_pass *
make_pass_rtl_ifcvt (gcc::context *ctxt)
{
  return new pass_rtl_ifcvt (ctxt);
}

/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  */

namespace {

const pass_data pass_data_if_after_combine =
{
  RTL_PASS, /* type */
  "ce2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_if_after_combine : public rtl_opt_pass
{
public:
  pass_if_after_combine (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_combine, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion
	&& dbg_cnt (if_after_combine);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_combine

} // anon namespace

rtl_opt_pass *
make_pass_if_after_combine (gcc::context *ctxt)
{
  return new pass_if_after_combine (ctxt);
}

namespace {

const pass_data pass_data_if_after_reload =
{
  RTL_PASS, /* type */
  "ce3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_if_after_reload : public rtl_opt_pass
{
public:
  pass_if_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion2
	&& dbg_cnt (if_after_reload);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_if_after_reload (gcc::context *ctxt)
{
  return new pass_if_after_reload (ctxt);
}