/* Source: gcc/ifcvt.c from official-gcc.git
   (blob dbe2485987019e06022f59b365924ae62ed94d57,
   commit "make recog () take a rtx_insn *").  */
1 /* If-conversion support.
2 Copyright (C) 2000-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "cfghooks.h"
28 #include "df.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "expmed.h"
32 #include "optabs.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
37 #include "cfgrtl.h"
38 #include "cfganal.h"
39 #include "cfgcleanup.h"
40 #include "expr.h"
41 #include "output.h"
42 #include "cfgloop.h"
43 #include "tree-pass.h"
44 #include "dbgcnt.h"
45 #include "shrink-wrap.h"
46 #include "rtl-iter.h"
47 #include "ifcvt.h"
48 #include "params.h"
50 #ifndef MAX_CONDITIONAL_EXECUTE
51 #define MAX_CONDITIONAL_EXECUTE \
52 (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
53 + 1)
54 #endif
56 #define IFCVT_MULTIPLE_DUMPS 1
58 #define NULL_BLOCK ((basic_block) NULL)
60 /* True if after combine pass. */
61 static bool ifcvt_after_combine;
63 /* True if the target has the cbranchcc4 optab. */
64 static bool have_cbranchcc4;
66 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
67 static int num_possible_if_blocks;
69 /* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
70 execution. */
71 static int num_updated_if_blocks;
73 /* # of changes made. */
74 static int num_true_changes;
76 /* Whether conditional execution changes were made. */
77 static int cond_exec_changed_p;
79 /* Forward references. */
80 static int count_bb_insns (const_basic_block);
81 static bool cheap_bb_rtx_cost_p (const_basic_block, int, int);
82 static rtx_insn *first_active_insn (basic_block);
83 static rtx_insn *last_active_insn (basic_block, int);
84 static rtx_insn *find_active_insn_before (basic_block, rtx_insn *);
85 static rtx_insn *find_active_insn_after (basic_block, rtx_insn *);
86 static basic_block block_fallthru (basic_block);
87 static int cond_exec_process_insns (ce_if_block *, rtx_insn *, rtx, rtx, int,
88 int);
89 static rtx cond_exec_get_condition (rtx_insn *);
90 static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
91 static int noce_operand_ok (const_rtx);
92 static void merge_if_block (ce_if_block *);
93 static int find_cond_trap (basic_block, edge, edge);
94 static basic_block find_if_header (basic_block, int);
95 static int block_jumps_and_fallthru_p (basic_block, basic_block);
96 static int noce_find_if_block (basic_block, edge, edge, int);
97 static int cond_exec_find_if_block (ce_if_block *);
98 static int find_if_case_1 (basic_block, edge, edge);
99 static int find_if_case_2 (basic_block, edge, edge);
100 static int dead_or_predicable (basic_block, basic_block, basic_block,
101 edge, int);
102 static void noce_emit_move_insn (rtx, rtx);
103 static rtx_insn *block_has_only_trap (basic_block);
105 /* Count the number of non-jump active insns in BB. */
107 static int
108 count_bb_insns (const_basic_block bb)
110 int count = 0;
111 rtx_insn *insn = BB_HEAD (bb);
113 while (1)
115 if (active_insn_p (insn) && !JUMP_P (insn))
116 count++;
118 if (insn == BB_END (bb))
119 break;
120 insn = NEXT_INSN (insn);
123 return count;
126 /* Determine whether the total insn_rtx_cost on non-jump insns in
127 basic block BB is less than MAX_COST. This function returns
128 false if the cost of any instruction could not be estimated.
130 The cost of the non-jump insns in BB is scaled by REG_BR_PROB_BASE
131 as those insns are being speculated. MAX_COST is scaled with SCALE
132 plus a small fudge factor. */
134 static bool
135 cheap_bb_rtx_cost_p (const_basic_block bb, int scale, int max_cost)
137 int count = 0;
138 rtx_insn *insn = BB_HEAD (bb);
139 bool speed = optimize_bb_for_speed_p (bb);
141 /* Set scale to REG_BR_PROB_BASE to void the identical scaling
142 applied to insn_rtx_cost when optimizing for size. Only do
143 this after combine because if-conversion might interfere with
144 passes before combine.
146 Use optimize_function_for_speed_p instead of the pre-defined
147 variable speed to make sure it is set to same value for all
148 basic blocks in one if-conversion transformation. */
149 if (!optimize_function_for_speed_p (cfun) && ifcvt_after_combine)
150 scale = REG_BR_PROB_BASE;
151 /* Our branch probability/scaling factors are just estimates and don't
152 account for cases where we can get speculation for free and other
153 secondary benefits. So we fudge the scale factor to make speculating
154 appear a little more profitable when optimizing for performance. */
155 else
156 scale += REG_BR_PROB_BASE / 8;
159 max_cost *= scale;
161 while (1)
163 if (NONJUMP_INSN_P (insn))
165 int cost = insn_rtx_cost (PATTERN (insn), speed) * REG_BR_PROB_BASE;
166 if (cost == 0)
167 return false;
169 /* If this instruction is the load or set of a "stack" register,
170 such as a floating point register on x87, then the cost of
171 speculatively executing this insn may need to include
172 the additional cost of popping its result off of the
173 register stack. Unfortunately, correctly recognizing and
174 accounting for this additional overhead is tricky, so for
175 now we simply prohibit such speculative execution. */
176 #ifdef STACK_REGS
178 rtx set = single_set (insn);
179 if (set && STACK_REG_P (SET_DEST (set)))
180 return false;
182 #endif
184 count += cost;
185 if (count >= max_cost)
186 return false;
188 else if (CALL_P (insn))
189 return false;
191 if (insn == BB_END (bb))
192 break;
193 insn = NEXT_INSN (insn);
196 return true;
199 /* Return the first non-jump active insn in the basic block. */
201 static rtx_insn *
202 first_active_insn (basic_block bb)
204 rtx_insn *insn = BB_HEAD (bb);
206 if (LABEL_P (insn))
208 if (insn == BB_END (bb))
209 return NULL;
210 insn = NEXT_INSN (insn);
213 while (NOTE_P (insn) || DEBUG_INSN_P (insn))
215 if (insn == BB_END (bb))
216 return NULL;
217 insn = NEXT_INSN (insn);
220 if (JUMP_P (insn))
221 return NULL;
223 return insn;
226 /* Return the last non-jump active (non-jump) insn in the basic block. */
228 static rtx_insn *
229 last_active_insn (basic_block bb, int skip_use_p)
231 rtx_insn *insn = BB_END (bb);
232 rtx_insn *head = BB_HEAD (bb);
234 while (NOTE_P (insn)
235 || JUMP_P (insn)
236 || DEBUG_INSN_P (insn)
237 || (skip_use_p
238 && NONJUMP_INSN_P (insn)
239 && GET_CODE (PATTERN (insn)) == USE))
241 if (insn == head)
242 return NULL;
243 insn = PREV_INSN (insn);
246 if (LABEL_P (insn))
247 return NULL;
249 return insn;
252 /* Return the active insn before INSN inside basic block CURR_BB. */
254 static rtx_insn *
255 find_active_insn_before (basic_block curr_bb, rtx_insn *insn)
257 if (!insn || insn == BB_HEAD (curr_bb))
258 return NULL;
260 while ((insn = PREV_INSN (insn)) != NULL_RTX)
262 if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
263 break;
265 /* No other active insn all the way to the start of the basic block. */
266 if (insn == BB_HEAD (curr_bb))
267 return NULL;
270 return insn;
273 /* Return the active insn after INSN inside basic block CURR_BB. */
275 static rtx_insn *
276 find_active_insn_after (basic_block curr_bb, rtx_insn *insn)
278 if (!insn || insn == BB_END (curr_bb))
279 return NULL;
281 while ((insn = NEXT_INSN (insn)) != NULL_RTX)
283 if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
284 break;
286 /* No other active insn all the way to the end of the basic block. */
287 if (insn == BB_END (curr_bb))
288 return NULL;
291 return insn;
294 /* Return the basic block reached by falling though the basic block BB. */
296 static basic_block
297 block_fallthru (basic_block bb)
299 edge e = find_fallthru_edge (bb->succs);
301 return (e) ? e->dest : NULL_BLOCK;
304 /* Return true if RTXs A and B can be safely interchanged. */
306 static bool
307 rtx_interchangeable_p (const_rtx a, const_rtx b)
309 if (!rtx_equal_p (a, b))
310 return false;
312 if (GET_CODE (a) != MEM)
313 return true;
315 /* A dead type-unsafe memory reference is legal, but a live type-unsafe memory
316 reference is not. Interchanging a dead type-unsafe memory reference with
317 a live type-safe one creates a live type-unsafe memory reference, in other
318 words, it makes the program illegal.
319 We check here conservatively whether the two memory references have equal
320 memory attributes. */
322 return mem_attrs_eq_p (get_mem_attrs (a), get_mem_attrs (b));
326 /* Go through a bunch of insns, converting them to conditional
327 execution format if possible. Return TRUE if all of the non-note
328 insns were processed. */
/* NOTE(review): changes are queued with validate_change (in_group=1);
   the caller is responsible for apply_change_group/cancel_changes.  */
330 static int
331 cond_exec_process_insns (ce_if_block *ce_info ATTRIBUTE_UNUSED,
332 /* if block information */rtx_insn *start,
333 /* first insn to look at */rtx end,
334 /* last insn to look at */rtx test,
335 /* conditional execution test */int prob_val,
336 /* probability of branch taken. */int mod_ok)
338 int must_be_last = FALSE;
339 rtx_insn *insn;
340 rtx xtest;
341 rtx pattern;
343 if (!start || !end)
344 return FALSE;
346 for (insn = start; ; insn = NEXT_INSN (insn))
348 /* dwarf2out can't cope with conditional prologues. */
349 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
350 return FALSE;
352 if (NOTE_P (insn) || DEBUG_INSN_P (insn))
353 goto insn_done;
355 gcc_assert (NONJUMP_INSN_P (insn) || CALL_P (insn));
357 /* dwarf2out can't cope with conditional unwind info. */
358 if (RTX_FRAME_RELATED_P (insn))
359 return FALSE;
361 /* Remove USE insns that get in the way. */
362 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
364 /* ??? Ug. Actually unlinking the thing is problematic,
365 given what we'd have to coordinate with our callers. */
366 SET_INSN_DELETED (insn);
367 goto insn_done;
370 /* Last insn wasn't last? */
/* NOTE(review): an insn that modifies TEST must be the last converted
   insn; seeing another insn after it means the conversion fails.  */
371 if (must_be_last)
372 return FALSE;
374 if (modified_in_p (test, insn))
376 if (!mod_ok)
377 return FALSE;
378 must_be_last = TRUE;
381 /* Now build the conditional form of the instruction. */
382 pattern = PATTERN (insn);
383 xtest = copy_rtx (test);
385 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
386 two conditions. */
387 if (GET_CODE (pattern) == COND_EXEC)
389 if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
390 return FALSE;
392 xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
393 COND_EXEC_TEST (pattern));
394 pattern = COND_EXEC_CODE (pattern);
397 pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
399 /* If the machine needs to modify the insn being conditionally executed,
400 say for example to force a constant integer operand into a temp
401 register, do so here. */
402 #ifdef IFCVT_MODIFY_INSN
403 IFCVT_MODIFY_INSN (ce_info, pattern, insn);
404 if (! pattern)
405 return FALSE;
406 #endif
408 validate_change (insn, &PATTERN (insn), pattern, 1);
/* NOTE(review): a negative PROB_VAL means "probability unknown", so no
   REG_BR_PROB note is attached in that case.  */
410 if (CALL_P (insn) && prob_val >= 0)
411 validate_change (insn, &REG_NOTES (insn),
412 gen_rtx_INT_LIST ((machine_mode) REG_BR_PROB,
413 prob_val, REG_NOTES (insn)), 1);
415 insn_done:
416 if (insn == end)
417 break;
420 return TRUE;
423 /* Return the condition for a jump. Do not do any special processing. */
425 static rtx
426 cond_exec_get_condition (rtx_insn *jump)
428 rtx test_if, cond;
430 if (any_condjump_p (jump))
431 test_if = SET_SRC (pc_set (jump));
432 else
433 return NULL_RTX;
434 cond = XEXP (test_if, 0);
436 /* If this branches to JUMP_LABEL when the condition is false,
437 reverse the condition. */
438 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
439 && label_ref_label (XEXP (test_if, 2)) == JUMP_LABEL (jump))
441 enum rtx_code rev = reversed_comparison_code (cond, jump);
442 if (rev == UNKNOWN)
443 return NULL_RTX;
445 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
446 XEXP (cond, 1));
449 return cond;
452 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
453 to conditional execution. Return TRUE if we were successful at
454 converting the block. */
/* NOTE(review): high-level flow — isolate the test from the jump
   ending TEST_BB, bound and count the candidate insns, trim head/tail
   sequences common to THEN and ELSE, predicate the remaining insns via
   cond_exec_process_insns, then commit with apply_change_group and
   merge the blocks.  */
456 static int
457 cond_exec_process_if_block (ce_if_block * ce_info,
458 /* if block information */int do_multiple_p)
460 basic_block test_bb = ce_info->test_bb; /* last test block */
461 basic_block then_bb = ce_info->then_bb; /* THEN */
462 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
463 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
464 rtx_insn *then_start; /* first insn in THEN block */
465 rtx_insn *then_end; /* last insn + 1 in THEN block */
466 rtx_insn *else_start = NULL; /* first insn in ELSE block or NULL */
467 rtx_insn *else_end = NULL; /* last insn + 1 in ELSE block */
468 int max; /* max # of insns to convert. */
469 int then_mod_ok; /* whether conditional mods are ok in THEN */
470 rtx true_expr; /* test for else block insns */
471 rtx false_expr; /* test for then block insns */
472 int true_prob_val; /* probability of else block */
473 int false_prob_val; /* probability of then block */
474 rtx_insn *then_last_head = NULL; /* Last match at the head of THEN */
475 rtx_insn *else_last_head = NULL; /* Last match at the head of ELSE */
476 rtx_insn *then_first_tail = NULL; /* First match at the tail of THEN */
477 rtx_insn *else_first_tail = NULL; /* First match at the tail of ELSE */
478 int then_n_insns, else_n_insns, n_insns;
479 enum rtx_code false_code;
480 rtx note;
482 /* If test is comprised of && or || elements, and we've failed at handling
483 all of them together, just use the last test if it is the special case of
484 && elements without an ELSE block. */
485 if (!do_multiple_p && ce_info->num_multiple_test_blocks)
487 if (else_bb || ! ce_info->and_and_p)
488 return FALSE;
490 ce_info->test_bb = test_bb = ce_info->last_test_bb;
491 ce_info->num_multiple_test_blocks = 0;
492 ce_info->num_and_and_blocks = 0;
493 ce_info->num_or_or_blocks = 0;
496 /* Find the conditional jump to the ELSE or JOIN part, and isolate
497 the test. */
498 test_expr = cond_exec_get_condition (BB_END (test_bb));
499 if (! test_expr)
500 return FALSE;
502 /* If the conditional jump is more than just a conditional jump,
503 then we can not do conditional execution conversion on this block. */
504 if (! onlyjump_p (BB_END (test_bb)))
505 return FALSE;
507 /* Collect the bounds of where we're to search, skipping any labels, jumps
508 and notes at the beginning and end of the block. Then count the total
509 number of insns and see if it is small enough to convert. */
510 then_start = first_active_insn (then_bb);
511 then_end = last_active_insn (then_bb, TRUE);
512 then_n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
513 n_insns = then_n_insns;
514 max = MAX_CONDITIONAL_EXECUTE;
516 if (else_bb)
518 int n_matching;
/* NOTE(review): with an ELSE block twice as many insns may be
   converted, hence the doubled budget.  */
520 max *= 2;
521 else_start = first_active_insn (else_bb);
522 else_end = last_active_insn (else_bb, TRUE);
523 else_n_insns = ce_info->num_else_insns = count_bb_insns (else_bb);
524 n_insns += else_n_insns;
526 /* Look for matching sequences at the head and tail of the two blocks,
527 and limit the range of insns to be converted if possible. */
528 n_matching = flow_find_cross_jump (then_bb, else_bb,
529 &then_first_tail, &else_first_tail,
530 NULL);
531 if (then_first_tail == BB_HEAD (then_bb))
532 then_start = then_end = NULL;
533 if (else_first_tail == BB_HEAD (else_bb))
534 else_start = else_end = NULL;
536 if (n_matching > 0)
538 if (then_end)
539 then_end = find_active_insn_before (then_bb, then_first_tail);
540 if (else_end)
541 else_end = find_active_insn_before (else_bb, else_first_tail);
542 n_insns -= 2 * n_matching;
545 if (then_start
546 && else_start
547 && then_n_insns > n_matching
548 && else_n_insns > n_matching)
550 int longest_match = MIN (then_n_insns - n_matching,
551 else_n_insns - n_matching);
552 n_matching
553 = flow_find_head_matching_sequence (then_bb, else_bb,
554 &then_last_head,
555 &else_last_head,
556 longest_match);
558 if (n_matching > 0)
560 rtx_insn *insn;
562 /* We won't pass the insns in the head sequence to
563 cond_exec_process_insns, so we need to test them here
564 to make sure that they don't clobber the condition. */
565 for (insn = BB_HEAD (then_bb);
566 insn != NEXT_INSN (then_last_head);
567 insn = NEXT_INSN (insn))
568 if (!LABEL_P (insn) && !NOTE_P (insn)
569 && !DEBUG_INSN_P (insn)
570 && modified_in_p (test_expr, insn))
571 return FALSE;
574 if (then_last_head == then_end)
575 then_start = then_end = NULL;
576 if (else_last_head == else_end)
577 else_start = else_end = NULL;
579 if (n_matching > 0)
581 if (then_start)
582 then_start = find_active_insn_after (then_bb, then_last_head);
583 if (else_start)
584 else_start = find_active_insn_after (else_bb, else_last_head);
585 n_insns -= 2 * n_matching;
590 if (n_insns > max)
591 return FALSE;
593 /* Map test_expr/test_jump into the appropriate MD tests to use on
594 the conditionally executed code. */
596 true_expr = test_expr;
598 false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
599 if (false_code != UNKNOWN)
600 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
601 XEXP (true_expr, 0), XEXP (true_expr, 1));
602 else
603 false_expr = NULL_RTX;
605 #ifdef IFCVT_MODIFY_TESTS
606 /* If the machine description needs to modify the tests, such as setting a
607 conditional execution register from a comparison, it can do so here. */
608 IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
610 /* See if the conversion failed. */
611 if (!true_expr || !false_expr)
612 goto fail;
613 #endif
615 note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
616 if (note)
618 true_prob_val = XINT (note, 0);
619 false_prob_val = REG_BR_PROB_BASE - true_prob_val;
621 else
/* NOTE(review): -1 here means "probability unknown"; see the
   prob_val >= 0 guard in cond_exec_process_insns.  */
623 true_prob_val = -1;
624 false_prob_val = -1;
627 /* If we have && or || tests, do them here. These tests are in the adjacent
628 blocks after the first block containing the test. */
629 if (ce_info->num_multiple_test_blocks > 0)
631 basic_block bb = test_bb;
632 basic_block last_test_bb = ce_info->last_test_bb;
634 if (! false_expr)
635 goto fail;
639 rtx_insn *start, *end;
640 rtx t, f;
641 enum rtx_code f_code;
643 bb = block_fallthru (bb);
644 start = first_active_insn (bb);
645 end = last_active_insn (bb, TRUE);
646 if (start
647 && ! cond_exec_process_insns (ce_info, start, end, false_expr,
648 false_prob_val, FALSE))
649 goto fail;
651 /* If the conditional jump is more than just a conditional jump, then
652 we can not do conditional execution conversion on this block. */
653 if (! onlyjump_p (BB_END (bb)))
654 goto fail;
656 /* Find the conditional jump and isolate the test. */
657 t = cond_exec_get_condition (BB_END (bb));
658 if (! t)
659 goto fail;
661 f_code = reversed_comparison_code (t, BB_END (bb));
662 if (f_code == UNKNOWN)
663 goto fail;
665 f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
666 if (ce_info->and_and_p)
668 t = gen_rtx_AND (GET_MODE (t), true_expr, t);
669 f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
671 else
673 t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
674 f = gen_rtx_AND (GET_MODE (t), false_expr, f);
677 /* If the machine description needs to modify the tests, such as
678 setting a conditional execution register from a comparison, it can
679 do so here. */
680 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
681 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
683 /* See if the conversion failed. */
684 if (!t || !f)
685 goto fail;
686 #endif
688 true_expr = t;
689 false_expr = f;
691 while (bb != last_test_bb);
694 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
695 on then THEN block. */
696 then_mod_ok = (else_bb == NULL_BLOCK);
698 /* Go through the THEN and ELSE blocks converting the insns if possible
699 to conditional execution. */
701 if (then_end
702 && (! false_expr
703 || ! cond_exec_process_insns (ce_info, then_start, then_end,
704 false_expr, false_prob_val,
705 then_mod_ok)))
706 goto fail;
708 if (else_bb && else_end
709 && ! cond_exec_process_insns (ce_info, else_start, else_end,
710 true_expr, true_prob_val, TRUE))
711 goto fail;
713 /* If we cannot apply the changes, fail. Do not go through the normal fail
714 processing, since apply_change_group will call cancel_changes. */
715 if (! apply_change_group ())
717 #ifdef IFCVT_MODIFY_CANCEL
718 /* Cancel any machine dependent changes. */
719 IFCVT_MODIFY_CANCEL (ce_info);
720 #endif
721 return FALSE;
724 #ifdef IFCVT_MODIFY_FINAL
725 /* Do any machine dependent final modifications. */
726 IFCVT_MODIFY_FINAL (ce_info);
727 #endif
729 /* Conversion succeeded. */
730 if (dump_file)
731 fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
732 n_insns, (n_insns == 1) ? " was" : "s were");
734 /* Merge the blocks! If we had matching sequences, make sure to delete one
735 copy at the appropriate location first: delete the copy in the THEN branch
736 for a tail sequence so that the remaining one is executed last for both
737 branches, and delete the copy in the ELSE branch for a head sequence so
738 that the remaining one is executed first for both branches. */
739 if (then_first_tail)
741 rtx_insn *from = then_first_tail;
742 if (!INSN_P (from))
743 from = find_active_insn_after (then_bb, from);
744 delete_insn_chain (from, get_last_bb_insn (then_bb), false);
746 if (else_last_head)
747 delete_insn_chain (first_active_insn (else_bb), else_last_head, false);
749 merge_if_block (ce_info);
750 cond_exec_changed_p = TRUE;
751 return TRUE;
753 fail:
754 #ifdef IFCVT_MODIFY_CANCEL
755 /* Cancel any machine dependent changes. */
756 IFCVT_MODIFY_CANCEL (ce_info);
757 #endif
759 cancel_changes (0);
760 return FALSE;
763 /* Used by noce_process_if_block to communicate with its subroutines.
765 The subroutines know that A and B may be evaluated freely. They
766 know that X is a register. They should insert new instructions
767 before cond_earliest. */
769 struct noce_if_info
771 /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block. */
772 basic_block test_bb, then_bb, else_bb, join_bb;
774 /* The jump that ends TEST_BB. */
775 rtx_insn *jump;
777 /* The jump condition. */
778 rtx cond;
780 /* New insns should be inserted before this one. */
781 rtx_insn *cond_earliest;
783 /* Insns in the THEN and ELSE block. There is always just this
784 one insns in those blocks. The insns are single_set insns.
785 If there was no ELSE block, INSN_B is the last insn before
786 COND_EARLIEST, or NULL_RTX. In the former case, the insn
787 operands are still valid, as if INSN_B was moved down below
788 the jump. */
789 rtx_insn *insn_a, *insn_b;
791 /* The SET_SRC of INSN_A and INSN_B. */
792 rtx a, b;
794 /* The SET_DEST of INSN_A. */
795 rtx x;
797 /* The original set destination that the THEN and ELSE basic blocks finally
798 write their result to. */
799 rtx orig_x;
800 /* True if this if block is not canonical. In the canonical form of
801 if blocks, the THEN_BB is the block reached via the fallthru edge
802 from TEST_BB. For the noce transformations, we allow the symmetric
803 form as well. */
804 bool then_else_reversed;
806 /* True if the contents of then_bb and else_bb are a
807 simple single set instruction. */
808 bool then_simple;
809 bool else_simple;
811 /* True if we're optimisizing the control block for speed, false if
812 we're optimizing for size. */
813 bool speed_p;
815 /* The combined cost of COND, JUMP and the costs for THEN_BB and
816 ELSE_BB. */
/* NOTE(review): compared against seq_cost in
   noce_conversion_profitable_p when optimizing for size.  */
817 unsigned int original_cost;
819 /* Maximum permissible cost for the unconditional sequence we should
820 generate to replace this branch. */
821 unsigned int max_seq_cost;
823 /* The name of the noce transform that succeeded in if-converting
824 this structure. Used for debugging. */
825 const char *transform_name;
828 static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
829 static int noce_try_move (struct noce_if_info *);
830 static int noce_try_ifelse_collapse (struct noce_if_info *);
831 static int noce_try_store_flag (struct noce_if_info *);
832 static int noce_try_addcc (struct noce_if_info *);
833 static int noce_try_store_flag_constants (struct noce_if_info *);
834 static int noce_try_store_flag_mask (struct noce_if_info *);
835 static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
836 rtx, rtx, rtx);
837 static int noce_try_cmove (struct noce_if_info *);
838 static int noce_try_cmove_arith (struct noce_if_info *);
839 static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx_insn **);
840 static int noce_try_minmax (struct noce_if_info *);
841 static int noce_try_abs (struct noce_if_info *);
842 static int noce_try_sign_mask (struct noce_if_info *);
844 /* Return TRUE if SEQ is a good candidate as a replacement for the
845 if-convertible sequence described in IF_INFO. */
847 inline static bool
848 noce_conversion_profitable_p (rtx_insn *seq, struct noce_if_info *if_info)
850 bool speed_p = if_info->speed_p;
852 /* Cost up the new sequence. */
853 unsigned int cost = seq_cost (seq, speed_p);
855 /* When compiling for size, we can make a reasonably accurately guess
856 at the size growth. */
857 if (!speed_p)
858 return cost <= if_info->original_cost;
859 else
860 return cost <= if_info->max_seq_cost;
863 /* Helper function for noce_try_store_flag*. */
/* NOTE(review): emits insns into an open or new sequence that set X to
   the (possibly reversed/normalized) comparison result; returns X or
   the value from emit_store_flag, or NULL_RTX on failure.  */
865 static rtx
866 noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
867 int normalize)
869 rtx cond = if_info->cond;
870 int cond_complex;
871 enum rtx_code code;
873 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
874 || ! general_operand (XEXP (cond, 1), VOIDmode));
876 /* If earliest == jump, or when the condition is complex, try to
877 build the store_flag insn directly. */
879 if (cond_complex)
881 rtx set = pc_set (if_info->jump);
882 cond = XEXP (SET_SRC (set), 0);
/* Branching to JUMP_LABEL on the false arm means the sense of the
   comparison is inverted relative to COND.  */
883 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
884 && label_ref_label (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump))
885 reversep = !reversep;
886 if (if_info->then_else_reversed)
887 reversep = !reversep;
890 if (reversep)
891 code = reversed_comparison_code (cond, if_info->jump);
892 else
893 code = GET_CODE (cond);
895 if ((if_info->cond_earliest == if_info->jump || cond_complex)
896 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
898 rtx src = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
899 XEXP (cond, 1));
900 rtx set = gen_rtx_SET (x, src);
902 start_sequence ();
903 rtx_insn *insn = emit_insn (set);
/* Keep the direct store-flag insn only if the target recognizes
   it; otherwise throw the trial sequence away.  */
905 if (recog_memoized (insn) >= 0)
907 rtx_insn *seq = get_insns ();
908 end_sequence ();
909 emit_insn (seq);
911 if_info->cond_earliest = if_info->jump;
913 return x;
916 end_sequence ();
919 /* Don't even try if the comparison operands or the mode of X are weird. */
920 if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
921 return NULL_RTX;
923 return emit_store_flag (x, code, XEXP (cond, 0),
924 XEXP (cond, 1), VOIDmode,
925 (code == LTU || code == LEU
926 || code == GEU || code == GTU), normalize);
929 /* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
930 X is the destination/target and Y is the value to copy. */
932 static void
933 noce_emit_move_insn (rtx x, rtx y)
935 machine_mode outmode;
936 rtx outer, inner;
937 int bitpos;
939 if (GET_CODE (x) != STRICT_LOW_PART)
941 rtx_insn *seq, *insn;
942 rtx target;
943 optab ot;
945 start_sequence ();
946 /* Check that the SET_SRC is reasonable before calling emit_move_insn,
947 otherwise construct a suitable SET pattern ourselves. */
948 insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
949 ? emit_move_insn (x, y)
950 : emit_insn (gen_rtx_SET (x, y));
951 seq = get_insns ();
952 end_sequence ();
/* NOTE(review): if the plain move/SET is not recognized, fall back to
   store_bit_field for ZERO_EXTRACT destinations, or re-expand Y
   through the optab machinery so the target can supply the insn.  */
954 if (recog_memoized (insn) <= 0)
956 if (GET_CODE (x) == ZERO_EXTRACT)
958 rtx op = XEXP (x, 0);
959 unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
960 unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));
962 /* store_bit_field expects START to be relative to
963 BYTES_BIG_ENDIAN and adjusts this value for machines with
964 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN. In order to be able to
965 invoke store_bit_field again it is necessary to have the START
966 value from the first call. */
967 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
969 if (MEM_P (op))
970 start = BITS_PER_UNIT - start - size;
971 else
973 gcc_assert (REG_P (op));
974 start = BITS_PER_WORD - start - size;
978 gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
979 store_bit_field (op, size, start, 0, 0, GET_MODE (x), y, false);
980 return;
983 switch (GET_RTX_CLASS (GET_CODE (y)))
985 case RTX_UNARY:
986 ot = code_to_optab (GET_CODE (y));
987 if (ot)
989 start_sequence ();
990 target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
991 if (target != NULL_RTX)
993 if (target != x)
994 emit_move_insn (x, target);
995 seq = get_insns ();
997 end_sequence ();
999 break;
1001 case RTX_BIN_ARITH:
1002 case RTX_COMM_ARITH:
1003 ot = code_to_optab (GET_CODE (y));
1004 if (ot)
1006 start_sequence ();
1007 target = expand_binop (GET_MODE (y), ot,
1008 XEXP (y, 0), XEXP (y, 1),
1009 x, 0, OPTAB_DIRECT);
1010 if (target != NULL_RTX)
1012 if (target != x)
1013 emit_move_insn (x, target);
1014 seq = get_insns ();
1016 end_sequence ();
1018 break;
1020 default:
1021 break;
1025 emit_insn (seq);
1026 return;
/* STRICT_LOW_PART destination: write the low part through
   store_bit_field at the subreg's bit position.  */
1029 outer = XEXP (x, 0);
1030 inner = XEXP (outer, 0);
1031 outmode = GET_MODE (outer);
1032 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
1033 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos,
1034 0, 0, outmode, y, false);
1037 /* Return the CC reg if it is used in COND. */
1039 static rtx
1040 cc_in_cond (rtx cond)
1042 if (have_cbranchcc4 && cond
1043 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_CC)
1044 return XEXP (cond, 0);
1046 return NULL_RTX;
1049 /* Return sequence of instructions generated by if conversion. This
1050 function calls end_sequence() to end the current stream, ensures
1051 that the instructions are unshared, recognizable non-jump insns.
1052 On failure, this function returns a NULL_RTX. */
1054 static rtx_insn *
1055 end_ifcvt_sequence (struct noce_if_info *if_info)
1057 rtx_insn *insn;
1058 rtx_insn *seq = get_insns ();
1059 rtx cc = cc_in_cond (if_info->cond);
1061 set_used_flags (if_info->x);
1062 set_used_flags (if_info->cond);
1063 set_used_flags (if_info->a);
1064 set_used_flags (if_info->b);
1066 for (insn = seq; insn; insn = NEXT_INSN (insn))
1067 set_used_flags (insn);
1069 unshare_all_rtl_in_chain (seq);
1070 end_sequence ();
1072 /* Make sure that all of the instructions emitted are recognizable,
1073 and that we haven't introduced a new jump instruction.
1074 As an exercise for the reader, build a general mechanism that
1075 allows proper placement of required clobbers. */
1076 for (insn = seq; insn; insn = NEXT_INSN (insn))
1077 if (JUMP_P (insn)
1078 || recog_memoized (insn) == -1
1079 /* Make sure new generated code does not clobber CC. */
1080 || (cc && set_of (cc, insn)))
1081 return NULL;
1083 return seq;
1086 /* Return true iff the then and else basic block (if it exists)
1087 consist of a single simple set instruction. */
1089 static bool
1090 noce_simple_bbs (struct noce_if_info *if_info)
1092 if (!if_info->then_simple)
1093 return false;
1095 if (if_info->else_bb)
1096 return if_info->else_simple;
1098 return true;
/* Convert "if (a != b) x = a; else x = b" into "x = a" and
   "if (a == b) x = a; else x = b" into "x = b".  */

static int
noce_try_move (struct noce_if_info *if_info)
{
  rtx cond = if_info->cond;
  enum rtx_code code = GET_CODE (cond);
  rtx y;
  rtx_insn *seq;

  /* Only equality/inequality tests make the two arms collapse.  */
  if (code != NE && code != EQ)
    return FALSE;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  /* This optimization isn't valid if either A or B could be a NaN
     or a signed zero.  */
  if (HONOR_NANS (if_info->x)
      || HONOR_SIGNED_ZEROS (if_info->x))
    return FALSE;

  /* Check whether the operands of the comparison are A and B,
     in either order.  */
  if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
       && rtx_equal_p (if_info->b, XEXP (cond, 1)))
      || (rtx_equal_p (if_info->a, XEXP (cond, 1))
	  && rtx_equal_p (if_info->b, XEXP (cond, 0))))
    {
      if (!rtx_interchangeable_p (if_info->a, if_info->b))
	return FALSE;

      /* On NE the arms are equal in the taken branch only when the
	 test is false, so the false arm A is the value; on EQ it is B.  */
      y = (code == EQ) ? if_info->a : if_info->b;

      /* Avoid generating the move if the source is the destination.  */
      if (! rtx_equal_p (if_info->x, y))
	{
	  start_sequence ();
	  noce_emit_move_insn (if_info->x, y);
	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATION (if_info->insn_a));
	}
      if_info->transform_name = "noce_try_move";
      return TRUE;
    }
  return FALSE;
}
/* Try forming an IF_THEN_ELSE (cond, b, a) and collapsing that
   through simplify_rtx.  Sometimes that can eliminate the IF_THEN_ELSE.
   If that is the case, emit the result into x.  */

static int
noce_try_ifelse_collapse (struct noce_if_info * if_info)
{
  if (!noce_simple_bbs (if_info))
    return FALSE;

  machine_mode mode = GET_MODE (if_info->x);
  rtx if_then_else = simplify_gen_ternary (IF_THEN_ELSE, mode, mode,
					    if_info->cond, if_info->b,
					    if_info->a);

  /* If the simplifier returned the IF_THEN_ELSE unchanged it could
     not collapse it, so there is nothing to gain.  */
  if (GET_CODE (if_then_else) == IF_THEN_ELSE)
    return FALSE;

  rtx_insn *seq;
  start_sequence ();
  noce_emit_move_insn (if_info->x, if_then_else);
  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump,
			   INSN_LOCATION (if_info->insn_a));

  if_info->transform_name = "noce_try_ifelse_collapse";
  return TRUE;
}
/* Convert "if (test) x = 1; else x = 0".

   Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
   tried in noce_try_store_flag_constants after noce_try_cmove has had
   a go at the conversion.  */

static int
noce_try_store_flag (struct noce_if_info *if_info)
{
  int reversep;
  rtx target;
  rtx_insn *seq;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  /* "if (test) x = STORE_FLAG_VALUE; else x = 0" maps directly onto a
     store-flag; the mirrored arms need the condition reversed first.  */
  if (CONST_INT_P (if_info->b)
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
      && if_info->a == const0_rtx)
    reversep = 0;
  else if (if_info->b == const0_rtx
	   && CONST_INT_P (if_info->a)
	   && INTVAL (if_info->a) == STORE_FLAG_VALUE
	   && (reversed_comparison_code (if_info->cond, if_info->jump)
	       != UNKNOWN))
    reversep = 1;
  else
    return FALSE;

  start_sequence ();

  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
  if (target)
    {
      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      /* end_ifcvt_sequence also closes the sequence stream, so no
	 explicit end_sequence is needed on this path.  */
      seq = end_ifcvt_sequence (if_info);
      if (! seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATION (if_info->insn_a));
      if_info->transform_name = "noce_try_store_flag";
      return TRUE;
    }
  else
    {
      end_sequence ();
      return FALSE;
    }
}
/* Convert "if (test) x = -A; else x = A" into
   x = A; if (test) x = -x if the machine can do the
   conditional negate form of this cheaply.
   Try this before noce_try_cmove that will just load the
   immediates into two registers and do a conditional select
   between them.  If the target has a conditional negate or
   conditional invert operation we can save a potentially
   expensive constant synthesis.  */

static bool
noce_try_inverse_constants (struct noce_if_info *if_info)
{
  if (!noce_simple_bbs (if_info))
    return false;

  if (!CONST_INT_P (if_info->a)
      || !CONST_INT_P (if_info->b)
      || !REG_P (if_info->x))
    return false;

  machine_mode mode = GET_MODE (if_info->x);

  HOST_WIDE_INT val_a = INTVAL (if_info->a);
  HOST_WIDE_INT val_b = INTVAL (if_info->b);

  rtx cond = if_info->cond;

  rtx x = if_info->x;
  rtx target;

  start_sequence ();

  /* Pick NEG when the arms are exact negations, NOT when they are
     bitwise complements.  The HOST_WIDE_INT_MIN guard avoids the
     undefined signed negation of the most negative value.  */
  rtx_code code;
  if (val_b != HOST_WIDE_INT_MIN && val_a == -val_b)
    code = NEG;
  else if (val_a == ~val_b)
    code = NOT;
  else
    {
      end_sequence ();
      return false;
    }

  rtx tmp = gen_reg_rtx (mode);
  noce_emit_move_insn (tmp, if_info->a);

  target = emit_conditional_neg_or_complement (x, code, mode, cond, tmp, tmp);

  if (target)
    {
      rtx_insn *seq = get_insns ();

      if (!seq)
	{
	  end_sequence ();
	  return false;
	}

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      /* end_ifcvt_sequence closes the sequence stream itself.  */
      seq = end_ifcvt_sequence (if_info);

      if (!seq)
	return false;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATION (if_info->insn_a));
      if_info->transform_name = "noce_try_inverse_constants";
      return true;
    }

  end_sequence ();
  return false;
}
1318 /* Convert "if (test) x = a; else x = b", for A and B constant.
1319 Also allow A = y + c1, B = y + c2, with a common y between A
1320 and B. */
1322 static int
1323 noce_try_store_flag_constants (struct noce_if_info *if_info)
1325 rtx target;
1326 rtx_insn *seq;
1327 bool reversep;
1328 HOST_WIDE_INT itrue, ifalse, diff, tmp;
1329 int normalize;
1330 bool can_reverse;
1331 machine_mode mode = GET_MODE (if_info->x);;
1332 rtx common = NULL_RTX;
1334 rtx a = if_info->a;
1335 rtx b = if_info->b;
1337 /* Handle cases like x := test ? y + 3 : y + 4. */
1338 if (GET_CODE (a) == PLUS
1339 && GET_CODE (b) == PLUS
1340 && CONST_INT_P (XEXP (a, 1))
1341 && CONST_INT_P (XEXP (b, 1))
1342 && rtx_equal_p (XEXP (a, 0), XEXP (b, 0))
1343 /* Allow expressions that are not using the result or plain
1344 registers where we handle overlap below. */
1345 && (REG_P (XEXP (a, 0))
1346 || (noce_operand_ok (XEXP (a, 0))
1347 && ! reg_overlap_mentioned_p (if_info->x, XEXP (a, 0)))))
1349 common = XEXP (a, 0);
1350 a = XEXP (a, 1);
1351 b = XEXP (b, 1);
1354 if (!noce_simple_bbs (if_info))
1355 return FALSE;
1357 if (CONST_INT_P (a)
1358 && CONST_INT_P (b))
1360 ifalse = INTVAL (a);
1361 itrue = INTVAL (b);
1362 bool subtract_flag_p = false;
1364 diff = (unsigned HOST_WIDE_INT) itrue - ifalse;
1365 /* Make sure we can represent the difference between the two values. */
1366 if ((diff > 0)
1367 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
1368 return FALSE;
1370 diff = trunc_int_for_mode (diff, mode);
1372 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
1373 != UNKNOWN);
1375 reversep = false;
1376 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
1378 normalize = 0;
1379 /* We could collapse these cases but it is easier to follow the
1380 diff/STORE_FLAG_VALUE combinations when they are listed
1381 explicitly. */
1383 /* test ? 3 : 4
1384 => 4 + (test != 0). */
1385 if (diff < 0 && STORE_FLAG_VALUE < 0)
1386 reversep = false;
1387 /* test ? 4 : 3
1388 => can_reverse | 4 + (test == 0)
1389 !can_reverse | 3 - (test != 0). */
1390 else if (diff > 0 && STORE_FLAG_VALUE < 0)
1392 reversep = can_reverse;
1393 subtract_flag_p = !can_reverse;
1394 /* If we need to subtract the flag and we have PLUS-immediate
1395 A and B then it is unlikely to be beneficial to play tricks
1396 here. */
1397 if (subtract_flag_p && common)
1398 return FALSE;
1400 /* test ? 3 : 4
1401 => can_reverse | 3 + (test == 0)
1402 !can_reverse | 4 - (test != 0). */
1403 else if (diff < 0 && STORE_FLAG_VALUE > 0)
1405 reversep = can_reverse;
1406 subtract_flag_p = !can_reverse;
1407 /* If we need to subtract the flag and we have PLUS-immediate
1408 A and B then it is unlikely to be beneficial to play tricks
1409 here. */
1410 if (subtract_flag_p && common)
1411 return FALSE;
1413 /* test ? 4 : 3
1414 => 4 + (test != 0). */
1415 else if (diff > 0 && STORE_FLAG_VALUE > 0)
1416 reversep = false;
1417 else
1418 gcc_unreachable ();
1420 /* Is this (cond) ? 2^n : 0? */
1421 else if (ifalse == 0 && pow2p_hwi (itrue)
1422 && STORE_FLAG_VALUE == 1)
1423 normalize = 1;
1424 /* Is this (cond) ? 0 : 2^n? */
1425 else if (itrue == 0 && pow2p_hwi (ifalse) && can_reverse
1426 && STORE_FLAG_VALUE == 1)
1428 normalize = 1;
1429 reversep = true;
1431 /* Is this (cond) ? -1 : x? */
1432 else if (itrue == -1
1433 && STORE_FLAG_VALUE == -1)
1434 normalize = -1;
1435 /* Is this (cond) ? x : -1? */
1436 else if (ifalse == -1 && can_reverse
1437 && STORE_FLAG_VALUE == -1)
1439 normalize = -1;
1440 reversep = true;
1442 else
1443 return FALSE;
1445 if (reversep)
1447 std::swap (itrue, ifalse);
1448 diff = trunc_int_for_mode (-(unsigned HOST_WIDE_INT) diff, mode);
1451 start_sequence ();
1453 /* If we have x := test ? x + 3 : x + 4 then move the original
1454 x out of the way while we store flags. */
1455 if (common && rtx_equal_p (common, if_info->x))
1457 common = gen_reg_rtx (mode);
1458 noce_emit_move_insn (common, if_info->x);
1461 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
1462 if (! target)
1464 end_sequence ();
1465 return FALSE;
1468 /* if (test) x = 3; else x = 4;
1469 => x = 3 + (test == 0); */
1470 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
1472 /* Add the common part now. This may allow combine to merge this
1473 with the store flag operation earlier into some sort of conditional
1474 increment/decrement if the target allows it. */
1475 if (common)
1476 target = expand_simple_binop (mode, PLUS,
1477 target, common,
1478 target, 0, OPTAB_WIDEN);
1480 /* Always use ifalse here. It should have been swapped with itrue
1481 when appropriate when reversep is true. */
1482 target = expand_simple_binop (mode, subtract_flag_p ? MINUS : PLUS,
1483 gen_int_mode (ifalse, mode), target,
1484 if_info->x, 0, OPTAB_WIDEN);
1486 /* Other cases are not beneficial when the original A and B are PLUS
1487 expressions. */
1488 else if (common)
1490 end_sequence ();
1491 return FALSE;
1493 /* if (test) x = 8; else x = 0;
1494 => x = (test != 0) << 3; */
1495 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
1497 target = expand_simple_binop (mode, ASHIFT,
1498 target, GEN_INT (tmp), if_info->x, 0,
1499 OPTAB_WIDEN);
1502 /* if (test) x = -1; else x = b;
1503 => x = -(test != 0) | b; */
1504 else if (itrue == -1)
1506 target = expand_simple_binop (mode, IOR,
1507 target, gen_int_mode (ifalse, mode),
1508 if_info->x, 0, OPTAB_WIDEN);
1510 else
1512 end_sequence ();
1513 return FALSE;
1516 if (! target)
1518 end_sequence ();
1519 return FALSE;
1522 if (target != if_info->x)
1523 noce_emit_move_insn (if_info->x, target);
1525 seq = end_ifcvt_sequence (if_info);
1526 if (!seq || !noce_conversion_profitable_p (seq, if_info))
1527 return FALSE;
1529 emit_insn_before_setloc (seq, if_info->jump,
1530 INSN_LOCATION (if_info->insn_a));
1531 if_info->transform_name = "noce_try_store_flag_constants";
1533 return TRUE;
1536 return FALSE;
/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".  */

static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int subtract, normalize;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  /* A must be B plus a constant, and the condition must be reversible,
     because the increment happens only when the test is FALSE once
     reversed.  */
  if (GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
	  != UNKNOWN))
    {
      rtx cond = if_info->cond;
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
	  && general_operand (XEXP (cond, 1), VOIDmode))
	{
	  start_sequence ();
	  target = emit_conditional_add (if_info->x, code,
					 XEXP (cond, 0),
					 XEXP (cond, 1),
					 VOIDmode,
					 if_info->b,
					 XEXP (if_info->a, 1),
					 GET_MODE (if_info->x),
					 (code == LTU || code == GEU
					  || code == LEU || code == GTU));
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      /* end_ifcvt_sequence closes the sequence stream itself.  */
	      seq = end_ifcvt_sequence (if_info);
	      if (!seq || !noce_conversion_profitable_p (seq, if_info))
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATION (if_info->insn_a));
	      if_info->transform_name = "noce_try_addcc";

	      return TRUE;
	    }
	  end_sequence ();
	}

      /* If that fails, construct conditional increment or decrement using
	 setcc.  We're changing a branch and an increment to a comparison and
	 an ADD/SUB.  */
      if (XEXP (if_info->a, 1) == const1_rtx
	  || XEXP (if_info->a, 1) == constm1_rtx)
	{
	  start_sequence ();
	  /* Decide whether the store-flag result can be added/subtracted
	     directly or needs to be normalized to the constant first.  */
	  if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 0, normalize = 0;
	  else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 1, normalize = 0;
	  else
	    subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));

	  target = noce_emit_store_flag (if_info,
					 gen_reg_rtx (GET_MODE (if_info->x)),
					 1, normalize);

	  if (target)
	    target = expand_simple_binop (GET_MODE (if_info->x),
					  subtract ? MINUS : PLUS,
					  if_info->b, target, if_info->x,
					  0, OPTAB_WIDEN);
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq || !noce_conversion_profitable_p (seq, if_info))
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATION (if_info->insn_a));
	      if_info->transform_name = "noce_try_addcc";
	      return TRUE;
	    }
	  end_sequence ();
	}
    }

  return FALSE;
}
/* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */

static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int reversep;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  reversep = 0;

  /* Either "if (test) x = 0" directly, or the mirrored
     "if (test) ; else x = 0" when the condition is reversible.
     Note the deliberate assignment to REVERSEP inside the second
     condition arm.  */
  if ((if_info->a == const0_rtx
       && rtx_equal_p (if_info->b, if_info->x))
      || ((reversep = (reversed_comparison_code (if_info->cond,
						 if_info->jump)
		       != UNKNOWN))
	  && if_info->b == const0_rtx
	  && rtx_equal_p (if_info->a, if_info->x)))
    {
      start_sequence ();
      /* Store the flag normalized to -1 so it can be used directly as
	 an AND mask.  */
      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),
				     reversep, -1);
      if (target)
	target = expand_simple_binop (GET_MODE (if_info->x), AND,
				      if_info->x,
				      target, if_info->x, 0,
				      OPTAB_WIDEN);

      if (target)
	{
	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  /* end_ifcvt_sequence closes the sequence stream itself.  */
	  seq = end_ifcvt_sequence (if_info);
	  if (!seq || !noce_conversion_profitable_p (seq, if_info))
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATION (if_info->insn_a));
	  if_info->transform_name = "noce_try_store_flag_mask";

	  return TRUE;
	}

      end_sequence ();
    }

  return FALSE;
}
/* Helper function for noce_try_cmove and noce_try_cmove_arith.
   Emit a conditional move setting X to VTRUE when CODE(CMP_A, CMP_B)
   holds and VFALSE otherwise.  Return the rtx holding the result, or
   NULL_RTX on failure.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
		 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  rtx target ATTRIBUTE_UNUSED;
  int unsignedp ATTRIBUTE_UNUSED;

  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx cond = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      rtx if_then_else = gen_rtx_IF_THEN_ELSE (GET_MODE (x),
					       cond, vtrue, vfalse);
      rtx set = gen_rtx_SET (x, if_then_else);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      /* Keep the insn only if the target recognizes it as-is.  */
      if (recog_memoized (insn) >= 0)
	{
	  rtx_insn *seq = get_insns ();
	  end_sequence ();
	  emit_insn (seq);

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird
     except that the target supports cbranchcc4.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    {
      if (!have_cbranchcc4
	  || GET_MODE_CLASS (GET_MODE (cmp_a)) != MODE_CC
	  || cmp_b != const0_rtx)
	return NULL_RTX;
    }

  unsignedp = (code == LTU || code == GEU
	       || code == LEU || code == GTU);

  target = emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
				  vtrue, vfalse, GET_MODE (x),
				  unsignedp);
  if (target)
    return target;

  /* We might be faced with a situation like:

     x = (reg:M TARGET)
     vtrue = (subreg:M (reg:N VTRUE) BYTE)
     vfalse = (subreg:M (reg:N VFALSE) BYTE)

     We can't do a conditional move in mode M, but it's possible that we
     could do a conditional move in mode N instead and take a subreg of
     the result.

     If we can't create new pseudos, though, don't bother.  */
  if (reload_completed)
    return NULL_RTX;

  if (GET_CODE (vtrue) == SUBREG && GET_CODE (vfalse) == SUBREG)
    {
      rtx reg_vtrue = SUBREG_REG (vtrue);
      rtx reg_vfalse = SUBREG_REG (vfalse);
      unsigned int byte_vtrue = SUBREG_BYTE (vtrue);
      unsigned int byte_vfalse = SUBREG_BYTE (vfalse);
      rtx promoted_target;

      /* Both subregs must refer to the same part of registers of the
	 same inner mode and agree on promotion status.  */
      if (GET_MODE (reg_vtrue) != GET_MODE (reg_vfalse)
	  || byte_vtrue != byte_vfalse
	  || (SUBREG_PROMOTED_VAR_P (vtrue)
	      != SUBREG_PROMOTED_VAR_P (vfalse))
	  || (SUBREG_PROMOTED_GET (vtrue)
	      != SUBREG_PROMOTED_GET (vfalse)))
	return NULL_RTX;

      promoted_target = gen_reg_rtx (GET_MODE (reg_vtrue));

      target = emit_conditional_move (promoted_target, code, cmp_a, cmp_b,
				      VOIDmode, reg_vtrue, reg_vfalse,
				      GET_MODE (reg_vtrue), unsignedp);
      /* Nope, couldn't do it in that mode either.  */
      if (!target)
	return NULL_RTX;

      target = gen_rtx_SUBREG (GET_MODE (vtrue), promoted_target, byte_vtrue);
      SUBREG_PROMOTED_VAR_P (target) = SUBREG_PROMOTED_VAR_P (vtrue);
      SUBREG_PROMOTED_SET (target, SUBREG_PROMOTED_GET (vtrue));
      emit_move_insn (x, target);
      return x;
    }
  else
    return NULL_RTX;
}
/* Try only simple constants and registers here.  More complex cases
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
   has had a go at it.  */

static int
noce_try_cmove (struct noce_if_info *if_info)
{
  enum rtx_code code;
  rtx target;
  rtx_insn *seq;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
    {
      start_sequence ();

      code = GET_CODE (if_info->cond);
      target = noce_emit_cmove (if_info, if_info->x, code,
				XEXP (if_info->cond, 0),
				XEXP (if_info->cond, 1),
				if_info->a, if_info->b);

      if (target)
	{
	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  /* end_ifcvt_sequence closes the sequence stream itself.  */
	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATION (if_info->insn_a));
	  if_info->transform_name = "noce_try_cmove";

	  return TRUE;
	}
      /* If both a and b are constants try a last-ditch transformation:
	 if (test) x = a; else x = b;
	 =>   x = (-(test != 0) & (b - a)) + a;
	 Try this only if the target-specific expansion above has failed.
	 The target-specific expander may want to generate sequences that
	 we don't know about, so give them a chance before trying this
	 approach.  */
      else if (!targetm.have_conditional_execution ()
	       && CONST_INT_P (if_info->a) && CONST_INT_P (if_info->b))
	{
	  machine_mode mode = GET_MODE (if_info->x);
	  HOST_WIDE_INT ifalse = INTVAL (if_info->a);
	  HOST_WIDE_INT itrue = INTVAL (if_info->b);
	  /* Store the flag normalized to -1 to use it as a mask.  */
	  rtx target = noce_emit_store_flag (if_info, if_info->x, false, -1);
	  if (!target)
	    {
	      end_sequence ();
	      return FALSE;
	    }

	  /* Compute in unsigned arithmetic to avoid signed overflow.  */
	  HOST_WIDE_INT diff = (unsigned HOST_WIDE_INT) itrue - ifalse;
	  /* Make sure we can represent the difference
	     between the two values.  */
	  if ((diff > 0)
	      != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
	    {
	      end_sequence ();
	      return FALSE;
	    }

	  diff = trunc_int_for_mode (diff, mode);
	  target = expand_simple_binop (mode, AND,
					target, gen_int_mode (diff, mode),
					if_info->x, 0, OPTAB_WIDEN);
	  if (target)
	    target = expand_simple_binop (mode, PLUS,
					  target, gen_int_mode (ifalse, mode),
					  if_info->x, 0, OPTAB_WIDEN);
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq || !noce_conversion_profitable_p (seq, if_info))
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATION (if_info->insn_a));
	      if_info->transform_name = "noce_try_cmove";
	      return TRUE;
	    }
	  else
	    {
	      end_sequence ();
	      return FALSE;
	    }
	}
      else
	end_sequence ();
    }

  return FALSE;
}
1901 /* Return true if X contains a conditional code mode rtx. */
1903 static bool
1904 contains_ccmode_rtx_p (rtx x)
1906 subrtx_iterator::array_type array;
1907 FOR_EACH_SUBRTX (iter, array, x, ALL)
1908 if (GET_MODE_CLASS (GET_MODE (*iter)) == MODE_CC)
1909 return true;
1911 return false;
1914 /* Helper for bb_valid_for_noce_process_p. Validate that
1915 the rtx insn INSN is a single set that does not set
1916 the conditional register CC and is in general valid for
1917 if-conversion. */
1919 static bool
1920 insn_valid_noce_process_p (rtx_insn *insn, rtx cc)
1922 if (!insn
1923 || !NONJUMP_INSN_P (insn)
1924 || (cc && set_of (cc, insn)))
1925 return false;
1927 rtx sset = single_set (insn);
1929 /* Currently support only simple single sets in test_bb. */
1930 if (!sset
1931 || !noce_operand_ok (SET_DEST (sset))
1932 || contains_ccmode_rtx_p (SET_DEST (sset))
1933 || !noce_operand_ok (SET_SRC (sset)))
1934 return false;
1936 return true;
1940 /* Return true iff the registers that the insns in BB_A set do not get
1941 used in BB_B. If TO_RENAME is non-NULL then it is a location that will be
1942 renamed later by the caller and so conflicts on it should be ignored
1943 in this function. */
1945 static bool
1946 bbs_ok_for_cmove_arith (basic_block bb_a, basic_block bb_b, rtx to_rename)
1948 rtx_insn *a_insn;
1949 bitmap bba_sets = BITMAP_ALLOC (&reg_obstack);
1951 df_ref def;
1952 df_ref use;
1954 FOR_BB_INSNS (bb_a, a_insn)
1956 if (!active_insn_p (a_insn))
1957 continue;
1959 rtx sset_a = single_set (a_insn);
1961 if (!sset_a)
1963 BITMAP_FREE (bba_sets);
1964 return false;
1966 /* Record all registers that BB_A sets. */
1967 FOR_EACH_INSN_DEF (def, a_insn)
1968 if (!(to_rename && DF_REF_REG (def) == to_rename))
1969 bitmap_set_bit (bba_sets, DF_REF_REGNO (def));
1972 rtx_insn *b_insn;
1974 FOR_BB_INSNS (bb_b, b_insn)
1976 if (!active_insn_p (b_insn))
1977 continue;
1979 rtx sset_b = single_set (b_insn);
1981 if (!sset_b)
1983 BITMAP_FREE (bba_sets);
1984 return false;
1987 /* Make sure this is a REG and not some instance
1988 of ZERO_EXTRACT or SUBREG or other dangerous stuff.
1989 If we have a memory destination then we have a pair of simple
1990 basic blocks performing an operation of the form [addr] = c ? a : b.
1991 bb_valid_for_noce_process_p will have ensured that these are
1992 the only stores present. In that case [addr] should be the location
1993 to be renamed. Assert that the callers set this up properly. */
1994 if (MEM_P (SET_DEST (sset_b)))
1995 gcc_assert (rtx_equal_p (SET_DEST (sset_b), to_rename));
1996 else if (!REG_P (SET_DEST (sset_b)))
1998 BITMAP_FREE (bba_sets);
1999 return false;
2002 /* If the insn uses a reg set in BB_A return false. */
2003 FOR_EACH_INSN_USE (use, b_insn)
2005 if (bitmap_bit_p (bba_sets, DF_REF_REGNO (use)))
2007 BITMAP_FREE (bba_sets);
2008 return false;
2014 BITMAP_FREE (bba_sets);
2015 return true;
2018 /* Emit copies of all the active instructions in BB except the last.
2019 This is a helper for noce_try_cmove_arith. */
2021 static void
2022 noce_emit_all_but_last (basic_block bb)
2024 rtx_insn *last = last_active_insn (bb, FALSE);
2025 rtx_insn *insn;
2026 FOR_BB_INSNS (bb, insn)
2028 if (insn != last && active_insn_p (insn))
2030 rtx_insn *to_emit = as_a <rtx_insn *> (copy_rtx (insn));
2032 emit_insn (PATTERN (to_emit));
2037 /* Helper for noce_try_cmove_arith. Emit the pattern TO_EMIT and return
2038 the resulting insn or NULL if it's not a valid insn. */
2040 static rtx_insn *
2041 noce_emit_insn (rtx to_emit)
2043 gcc_assert (to_emit);
2044 rtx_insn *insn = emit_insn (to_emit);
2046 if (recog_memoized (insn) < 0)
2047 return NULL;
2049 return insn;
2052 /* Helper for noce_try_cmove_arith. Emit a copy of the insns up to
2053 and including the penultimate one in BB if it is not simple
2054 (as indicated by SIMPLE). Then emit LAST_INSN as the last
2055 insn in the block. The reason for that is that LAST_INSN may
2056 have been modified by the preparation in noce_try_cmove_arith. */
2058 static bool
2059 noce_emit_bb (rtx last_insn, basic_block bb, bool simple)
2061 if (bb && !simple)
2062 noce_emit_all_but_last (bb);
2064 if (last_insn && !noce_emit_insn (last_insn))
2065 return false;
2067 return true;
/* Try more complex cases involving conditional_move.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx_insn *insn_a, *insn_b;
  bool a_simple = if_info->then_simple;
  bool b_simple = if_info->else_simple;
  basic_block then_bb = if_info->then_bb;
  basic_block else_bb = if_info->else_bb;
  rtx target;
  int is_mem = 0;
  enum rtx_code code;
  rtx_insn *ifcvt_seq;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  if (cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b))
    {
      machine_mode address_mode = get_address_mode (a);

      /* Work on the addresses; the final load is emitted below.  */
      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (address_mode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not trap or fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_or_fault_p (a) || may_trap_or_fault_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
	if (test)
	  x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  machine_mode x_mode = GET_MODE (x);

  if (!can_conditionally_move_p (x_mode))
    return FALSE;

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
	reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
	reversep = 1;

      if (reversep)
	{
	  /* Reverse the condition and swap everything that belongs to
	     the THEN arm with the ELSE arm.  */
	  code = reversed_comparison_code (if_info->cond, if_info->jump);
	  std::swap (a, b);
	  std::swap (insn_a, insn_b);
	  std::swap (a_simple, b_simple);
	  std::swap (then_bb, else_bb);
	}
    }

  /* The blocks must not interfere: registers set in one must not be
     used in the other (conflicts on orig_x are ignored — it gets
     renamed).  */
  if (then_bb && else_bb
      && (!bbs_ok_for_cmove_arith (then_bb, else_bb, if_info->orig_x)
	  || !bbs_ok_for_cmove_arith (else_bb, then_bb, if_info->orig_x)))
    return FALSE;

  start_sequence ();

  /* If one of the blocks is empty then the corresponding B or A value
     came from the test block.  The non-empty complex block that we will
     emit might clobber the register used by B or A, so move it to a
     pseudo first.  */

  rtx tmp_a = NULL_RTX;
  rtx tmp_b = NULL_RTX;

  if (b_simple || !else_bb)
    tmp_b = gen_reg_rtx (x_mode);

  if (a_simple || !then_bb)
    tmp_a = gen_reg_rtx (x_mode);

  orig_a = a;
  orig_b = b;

  rtx emit_a = NULL_RTX;
  rtx emit_b = NULL_RTX;
  rtx_insn *tmp_insn = NULL;
  bool modified_in_a = false;
  bool modified_in_b = false;
  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */

  if (! general_operand (a, GET_MODE (a)) || tmp_a)
    {
      if (is_mem)
	{
	  rtx reg = gen_reg_rtx (GET_MODE (a));
	  emit_a = gen_rtx_SET (reg, a);
	}
      else
	{
	  if (insn_a)
	    {
	      a = tmp_a ? tmp_a : gen_reg_rtx (GET_MODE (a));

	      /* Copy the original insn and retarget its destination so
		 that clobbers etc are preserved.  */
	      rtx_insn *copy_of_a = as_a <rtx_insn *> (copy_rtx (insn_a));
	      rtx set = single_set (copy_of_a);
	      SET_DEST (set) = a;

	      emit_a = PATTERN (copy_of_a);
	    }
	  else
	    {
	      rtx tmp_reg = tmp_a ? tmp_a : gen_reg_rtx (GET_MODE (a));
	      emit_a = gen_rtx_SET (tmp_reg, a);
	      a = tmp_reg;
	    }
	}
    }

  if (! general_operand (b, GET_MODE (b)) || tmp_b)
    {
      if (is_mem)
	{
	  rtx reg = gen_reg_rtx (GET_MODE (b));
	  emit_b = gen_rtx_SET (reg, b);
	}
      else
	{
	  if (insn_b)
	    {
	      b = tmp_b ? tmp_b : gen_reg_rtx (GET_MODE (b));
	      rtx_insn *copy_of_b = as_a <rtx_insn *> (copy_rtx (insn_b));
	      rtx set = single_set (copy_of_b);

	      SET_DEST (set) = b;
	      emit_b = PATTERN (copy_of_b);
	    }
	  else
	    {
	      rtx tmp_reg = tmp_b ? tmp_b : gen_reg_rtx (GET_MODE (b));
	      emit_b = gen_rtx_SET (tmp_reg, b);
	      b = tmp_reg;
	    }
	}
    }

  modified_in_a = emit_a != NULL_RTX && modified_in_p (orig_b, emit_a);
  if (tmp_b && then_bb)
    {
      FOR_BB_INSNS (then_bb, tmp_insn)
	/* Don't check inside insn_a.  We will have changed it to emit_a
	   with a destination that doesn't conflict.  */
	if (!(insn_a && tmp_insn == insn_a)
	    && modified_in_p (orig_b, tmp_insn))
	  {
	    modified_in_a = true;
	    break;
	  }
    }

  modified_in_b = emit_b != NULL_RTX && modified_in_p (orig_a, emit_b);
  if (tmp_a && else_bb)
    {
      FOR_BB_INSNS (else_bb, tmp_insn)
	/* Don't check inside insn_b.  We will have changed it to emit_b
	   with a destination that doesn't conflict.  */
	if (!(insn_b && tmp_insn == insn_b)
	    && modified_in_p (orig_a, tmp_insn))
	  {
	    modified_in_b = true;
	    break;
	  }
    }

  /* If insn to set up A clobbers any registers B depends on, try to
     swap insn that sets up A with the one that sets up B.  If even
     that doesn't help, punt.  */
  if (modified_in_a && !modified_in_b)
    {
      if (!noce_emit_bb (emit_b, else_bb, b_simple))
	goto end_seq_and_fail;

      if (!noce_emit_bb (emit_a, then_bb, a_simple))
	goto end_seq_and_fail;
    }
  else if (!modified_in_a)
    {
      if (!noce_emit_bb (emit_a, then_bb, a_simple))
	goto end_seq_and_fail;

      if (!noce_emit_bb (emit_b, else_bb, b_simple))
	goto end_seq_and_fail;
    }
  else
    goto end_seq_and_fail;

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
			    XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory for above, emit the load now.  */
  if (is_mem)
    {
      rtx mem = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
	MEM_VOLATILE_P (mem) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
	set_mem_alias_set (mem, MEM_ALIAS_SET (if_info->a));
      set_mem_align (mem,
		     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
      set_mem_addr_space (mem, MEM_ADDR_SPACE (if_info->a));

      noce_emit_move_insn (if_info->x, mem);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  /* end_ifcvt_sequence closes the sequence stream itself.  */
  ifcvt_seq = end_ifcvt_sequence (if_info);
  if (!ifcvt_seq || !noce_conversion_profitable_p (ifcvt_seq, if_info))
    return FALSE;

  emit_insn_before_setloc (ifcvt_seq, if_info->jump,
			   INSN_LOCATION (if_info->insn_a));
  if_info->transform_name = "noce_try_cmove_arith";
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}
2328 /* For most cases, the simplified condition we found is the best
2329 choice, but this is not the case for the min/max/abs transforms.
2330 For these we wish to know that it is A or B in the condition. */
/* TARGET is the value (one of IF_INFO->a / IF_INFO->b, or a constant)
   that we want mentioned in the returned condition; *EARLIEST is set
   to the earliest insn at which the returned condition is valid.
   Returns NULL on failure.  */
2332 static rtx
2333 noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
2334 rtx_insn **earliest)
2336 rtx cond, set;
2337 rtx_insn *insn;
2338 int reverse;
2340 /* If target is already mentioned in the known condition, return it. */
2341 if (reg_mentioned_p (target, if_info->cond))
2343 *earliest = if_info->cond_earliest;
2344 return if_info->cond;
/* Otherwise decompose the jump ourselves.  If the branch goes to
   JUMP_LABEL when the condition is false, the condition must be
   reversed (same convention as in noce_get_condition).  */
2347 set = pc_set (if_info->jump);
2348 cond = XEXP (SET_SRC (set), 0);
2349 reverse
2350 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
2351 && label_ref_label (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump);
2352 if (if_info->then_else_reversed)
2353 reverse = !reverse;
2355 /* If we're looking for a constant, try to make the conditional
2356 have that constant in it. There are two reasons why it may
2357 not have the constant we want:
2359 1. GCC may have needed to put the constant in a register, because
2360 the target can't compare directly against that constant. For
2361 this case, we look for a SET immediately before the comparison
2362 that puts a constant in that register.
2364 2. GCC may have canonicalized the conditional, for example
2365 replacing "if x < 4" with "if x <= 3". We can undo that (or
2366 make equivalent types of changes) to get the constants we need
2367 if they're off by one in the right direction. */
2369 if (CONST_INT_P (target))
2371 enum rtx_code code = GET_CODE (if_info->cond);
2372 rtx op_a = XEXP (if_info->cond, 0);
2373 rtx op_b = XEXP (if_info->cond, 1);
2374 rtx_insn *prev_insn;
2376 /* First, look to see if we put a constant in a register. */
2377 prev_insn = prev_nonnote_insn (if_info->cond_earliest);
2378 if (prev_insn
2379 && BLOCK_FOR_INSN (prev_insn)
2380 == BLOCK_FOR_INSN (if_info->cond_earliest)
2381 && INSN_P (prev_insn)
2382 && GET_CODE (PATTERN (prev_insn)) == SET)
/* Prefer a REG_EQUAL/REG_EQUIV note over the raw SET_SRC.  */
2384 rtx src = find_reg_equal_equiv_note (prev_insn);
2385 if (!src)
2386 src = SET_SRC (PATTERN (prev_insn));
2387 if (CONST_INT_P (src))
2389 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
2390 op_a = src;
2391 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
2392 op_b = src;
/* Canonicalize: keep any constant as the second operand.  */
2394 if (CONST_INT_P (op_a))
2396 std::swap (op_a, op_b);
2397 code = swap_condition (code);
2402 /* Now, look to see if we can get the right constant by
2403 adjusting the conditional. */
2404 if (CONST_INT_P (op_b))
2406 HOST_WIDE_INT desired_val = INTVAL (target);
2407 HOST_WIDE_INT actual_val = INTVAL (op_b);
2409 switch (code)
/* Each case undoes the "off by one" canonicalization, guarding
   against overflow at the extreme HOST_WIDE_INT values.  */
2411 case LT:
2412 if (desired_val != HOST_WIDE_INT_MAX
2413 && actual_val == desired_val + 1)
2415 code = LE;
2416 op_b = GEN_INT (desired_val);
2418 break;
2419 case LE:
2420 if (desired_val != HOST_WIDE_INT_MIN
2421 && actual_val == desired_val - 1)
2423 code = LT;
2424 op_b = GEN_INT (desired_val);
2426 break;
2427 case GT:
2428 if (desired_val != HOST_WIDE_INT_MIN
2429 && actual_val == desired_val - 1)
2431 code = GE;
2432 op_b = GEN_INT (desired_val);
2434 break;
2435 case GE:
2436 if (desired_val != HOST_WIDE_INT_MAX
2437 && actual_val == desired_val + 1)
2439 code = GT;
2440 op_b = GEN_INT (desired_val);
2442 break;
2443 default:
2444 break;
2448 /* If we made any changes, generate a new conditional that is
2449 equivalent to what we started with, but has the right
2450 constants in it. */
2451 if (code != GET_CODE (if_info->cond)
2452 || op_a != XEXP (if_info->cond, 0)
2453 || op_b != XEXP (if_info->cond, 1))
2455 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
2456 *earliest = if_info->cond_earliest;
2457 return cond;
/* No luck with constants; let canonicalize_condition search backwards
   from the jump for a usable condition mentioning TARGET.  */
2461 cond = canonicalize_condition (if_info->jump, cond, reverse,
2462 earliest, target, have_cbranchcc4, true);
2463 if (! cond || ! reg_mentioned_p (target, cond))
2464 return NULL;
2466 /* We almost certainly searched back to a different place.
2467 Need to re-verify correct lifetimes. */
2469 /* X may not be mentioned in the range (cond_earliest, jump]. */
2470 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
2471 if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
2472 return NULL;
2474 /* A and B may not be modified in the range [cond_earliest, jump). */
2475 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
2476 if (INSN_P (insn)
2477 && (modified_in_p (if_info->a, insn)
2478 || modified_in_p (if_info->b, insn)))
2479 return NULL;
2481 return cond;
2484 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
2486 static int
2487 noce_try_minmax (struct noce_if_info *if_info)
2489 rtx cond, target;
2490 rtx_insn *earliest, *seq;
2491 enum rtx_code code, op;
2492 int unsignedp;
2494 if (!noce_simple_bbs (if_info))
2495 return FALSE;
2497 /* ??? Reject modes with NaNs or signed zeros since we don't know how
2498 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
2499 to get the target to tell us... */
2500 if (HONOR_SIGNED_ZEROS (if_info->x)
2501 || HONOR_NANS (if_info->x))
2502 return FALSE;
2504 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
2505 if (!cond)
2506 return FALSE;
2508 /* Verify the condition is of the form we expect, and canonicalize
2509 the comparison code. */
2510 code = GET_CODE (cond);
2511 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
2513 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
2514 return FALSE;
2516 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
2518 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
2519 return FALSE;
2520 code = swap_condition (code);
2522 else
2523 return FALSE;
2525 /* Determine what sort of operation this is. Note that the code is for
2526 a taken branch, so the code->operation mapping appears backwards. */
2527 switch (code)
2529 case LT:
2530 case LE:
2531 case UNLT:
2532 case UNLE:
2533 op = SMAX;
2534 unsignedp = 0;
2535 break;
2536 case GT:
2537 case GE:
2538 case UNGT:
2539 case UNGE:
2540 op = SMIN;
2541 unsignedp = 0;
2542 break;
2543 case LTU:
2544 case LEU:
2545 op = UMAX;
2546 unsignedp = 1;
2547 break;
2548 case GTU:
2549 case GEU:
2550 op = UMIN;
2551 unsignedp = 1;
2552 break;
2553 default:
2554 return FALSE;
2557 start_sequence ();
2559 target = expand_simple_binop (GET_MODE (if_info->x), op,
2560 if_info->a, if_info->b,
2561 if_info->x, unsignedp, OPTAB_WIDEN);
2562 if (! target)
2564 end_sequence ();
2565 return FALSE;
2567 if (target != if_info->x)
2568 noce_emit_move_insn (if_info->x, target);
2570 seq = end_ifcvt_sequence (if_info);
2571 if (!seq)
2572 return FALSE;
2574 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2575 if_info->cond = cond;
2576 if_info->cond_earliest = earliest;
2577 if_info->transform_name = "noce_try_minmax";
2579 return TRUE;
2582 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
2583 "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
2584 etc. */
2586 static int
2587 noce_try_abs (struct noce_if_info *if_info)
2589 rtx cond, target, a, b, c;
2590 rtx_insn *earliest, *seq;
2591 int negate;
2592 bool one_cmpl = false;
2594 if (!noce_simple_bbs (if_info))
2595 return FALSE;
2597 /* Reject modes with signed zeros. */
2598 if (HONOR_SIGNED_ZEROS (if_info->x))
2599 return FALSE;
2601 /* Recognize A and B as constituting an ABS or NABS. The canonical
2602 form is a branch around the negation, taken when the object is the
2603 first operand of a comparison against 0 that evaluates to true. */
2604 a = if_info->a;
2605 b = if_info->b;
2606 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
2607 negate = 0;
2608 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
2610 std::swap (a, b);
2611 negate = 1;
2613 else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
2615 negate = 0;
2616 one_cmpl = true;
2618 else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
2620 std::swap (a, b);
2621 negate = 1;
2622 one_cmpl = true;
2624 else
2625 return FALSE;
2627 cond = noce_get_alt_condition (if_info, b, &earliest);
2628 if (!cond)
2629 return FALSE;
2631 /* Verify the condition is of the form we expect. */
2632 if (rtx_equal_p (XEXP (cond, 0), b))
2633 c = XEXP (cond, 1);
2634 else if (rtx_equal_p (XEXP (cond, 1), b))
2636 c = XEXP (cond, 0);
2637 negate = !negate;
2639 else
2640 return FALSE;
2642 /* Verify that C is zero. Search one step backward for a
2643 REG_EQUAL note or a simple source if necessary. */
2644 if (REG_P (c))
2646 rtx set;
2647 rtx_insn *insn = prev_nonnote_insn (earliest);
2648 if (insn
2649 && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
2650 && (set = single_set (insn))
2651 && rtx_equal_p (SET_DEST (set), c))
2653 rtx note = find_reg_equal_equiv_note (insn);
2654 if (note)
2655 c = XEXP (note, 0);
2656 else
2657 c = SET_SRC (set);
2659 else
2660 return FALSE;
2662 if (MEM_P (c)
2663 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
2664 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
2665 c = get_pool_constant (XEXP (c, 0));
2667 /* Work around funny ideas get_condition has wrt canonicalization.
2668 Note that these rtx constants are known to be CONST_INT, and
2669 therefore imply integer comparisons.
2670 The one_cmpl case is more complicated, as we want to handle
2671 only x < 0 ? ~x : x or x >= 0 ? x : ~x to one_cmpl_abs (x)
2672 and x < 0 ? x : ~x or x >= 0 ? ~x : x to ~one_cmpl_abs (x),
2673 but not other cases (x > -1 is equivalent of x >= 0). */
2674 if (c == constm1_rtx && GET_CODE (cond) == GT)
2676 else if (c == const1_rtx && GET_CODE (cond) == LT)
2678 if (one_cmpl)
2679 return FALSE;
2681 else if (c == CONST0_RTX (GET_MODE (b)))
2683 if (one_cmpl
2684 && GET_CODE (cond) != GE
2685 && GET_CODE (cond) != LT)
2686 return FALSE;
2688 else
2689 return FALSE;
2691 /* Determine what sort of operation this is. */
2692 switch (GET_CODE (cond))
2694 case LT:
2695 case LE:
2696 case UNLT:
2697 case UNLE:
2698 negate = !negate;
2699 break;
2700 case GT:
2701 case GE:
2702 case UNGT:
2703 case UNGE:
2704 break;
2705 default:
2706 return FALSE;
2709 start_sequence ();
2710 if (one_cmpl)
2711 target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
2712 if_info->x);
2713 else
2714 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
2716 /* ??? It's a quandary whether cmove would be better here, especially
2717 for integers. Perhaps combine will clean things up. */
2718 if (target && negate)
2720 if (one_cmpl)
2721 target = expand_simple_unop (GET_MODE (target), NOT, target,
2722 if_info->x, 0);
2723 else
2724 target = expand_simple_unop (GET_MODE (target), NEG, target,
2725 if_info->x, 0);
2728 if (! target)
2730 end_sequence ();
2731 return FALSE;
2734 if (target != if_info->x)
2735 noce_emit_move_insn (if_info->x, target);
2737 seq = end_ifcvt_sequence (if_info);
2738 if (!seq)
2739 return FALSE;
2741 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2742 if_info->cond = cond;
2743 if_info->cond_earliest = earliest;
2744 if_info->transform_name = "noce_try_abs";
2746 return TRUE;
2749 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;". */
2751 static int
2752 noce_try_sign_mask (struct noce_if_info *if_info)
2754 rtx cond, t, m, c;
2755 rtx_insn *seq;
2756 machine_mode mode;
2757 enum rtx_code code;
2758 bool t_unconditional;
2760 if (!noce_simple_bbs (if_info))
2761 return FALSE;
2763 cond = if_info->cond;
2764 code = GET_CODE (cond);
2765 m = XEXP (cond, 0);
2766 c = XEXP (cond, 1);
2768 t = NULL_RTX;
2769 if (if_info->a == const0_rtx)
2771 if ((code == LT && c == const0_rtx)
2772 || (code == LE && c == constm1_rtx))
2773 t = if_info->b;
2775 else if (if_info->b == const0_rtx)
2777 if ((code == GE && c == const0_rtx)
2778 || (code == GT && c == constm1_rtx))
2779 t = if_info->a;
2782 if (! t || side_effects_p (t))
2783 return FALSE;
2785 /* We currently don't handle different modes. */
2786 mode = GET_MODE (t);
2787 if (GET_MODE (m) != mode)
2788 return FALSE;
2790 /* This is only profitable if T is unconditionally executed/evaluated in the
2791 original insn sequence or T is cheap. The former happens if B is the
2792 non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
2793 INSN_B which can happen for e.g. conditional stores to memory. For the
2794 cost computation use the block TEST_BB where the evaluation will end up
2795 after the transformation. */
2796 t_unconditional =
2797 (t == if_info->b
2798 && (if_info->insn_b == NULL_RTX
2799 || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
2800 if (!(t_unconditional
2801 || (set_src_cost (t, mode, if_info->speed_p)
2802 < COSTS_N_INSNS (2))))
2803 return FALSE;
2805 start_sequence ();
2806 /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
2807 "(signed) m >> 31" directly. This benefits targets with specialized
2808 insns to obtain the signmask, but still uses ashr_optab otherwise. */
2809 m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
2810 t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
2811 : NULL_RTX;
2813 if (!t)
2815 end_sequence ();
2816 return FALSE;
2819 noce_emit_move_insn (if_info->x, t);
2821 seq = end_ifcvt_sequence (if_info);
2822 if (!seq)
2823 return FALSE;
2825 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2826 if_info->transform_name = "noce_try_sign_mask";
2828 return TRUE;
2832 /* Optimize away "if (x & C) x |= C" and similar bit manipulation
2833 transformations. */
2835 static int
2836 noce_try_bitop (struct noce_if_info *if_info)
2838 rtx cond, x, a, result;
2839 rtx_insn *seq;
2840 machine_mode mode;
2841 enum rtx_code code;
2842 int bitnum;
2844 x = if_info->x;
2845 cond = if_info->cond;
2846 code = GET_CODE (cond);
2848 if (!noce_simple_bbs (if_info))
2849 return FALSE;
2851 /* Check for no else condition. */
2852 if (! rtx_equal_p (x, if_info->b))
2853 return FALSE;
2855 /* Check for a suitable condition. */
2856 if (code != NE && code != EQ)
2857 return FALSE;
2858 if (XEXP (cond, 1) != const0_rtx)
2859 return FALSE;
2860 cond = XEXP (cond, 0);
2862 /* ??? We could also handle AND here. */
2863 if (GET_CODE (cond) == ZERO_EXTRACT)
2865 if (XEXP (cond, 1) != const1_rtx
2866 || !CONST_INT_P (XEXP (cond, 2))
2867 || ! rtx_equal_p (x, XEXP (cond, 0)))
2868 return FALSE;
2869 bitnum = INTVAL (XEXP (cond, 2));
2870 mode = GET_MODE (x);
2871 if (BITS_BIG_ENDIAN)
2872 bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
2873 if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
2874 return FALSE;
2876 else
2877 return FALSE;
2879 a = if_info->a;
2880 if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
2882 /* Check for "if (X & C) x = x op C". */
2883 if (! rtx_equal_p (x, XEXP (a, 0))
2884 || !CONST_INT_P (XEXP (a, 1))
2885 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2886 != HOST_WIDE_INT_1U << bitnum)
2887 return FALSE;
2889 /* if ((x & C) == 0) x |= C; is transformed to x |= C. */
2890 /* if ((x & C) != 0) x |= C; is transformed to nothing. */
2891 if (GET_CODE (a) == IOR)
2892 result = (code == NE) ? a : NULL_RTX;
2893 else if (code == NE)
2895 /* if ((x & C) == 0) x ^= C; is transformed to x |= C. */
2896 result = gen_int_mode (HOST_WIDE_INT_1 << bitnum, mode);
2897 result = simplify_gen_binary (IOR, mode, x, result);
2899 else
2901 /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C. */
2902 result = gen_int_mode (~(HOST_WIDE_INT_1 << bitnum), mode);
2903 result = simplify_gen_binary (AND, mode, x, result);
2906 else if (GET_CODE (a) == AND)
2908 /* Check for "if (X & C) x &= ~C". */
2909 if (! rtx_equal_p (x, XEXP (a, 0))
2910 || !CONST_INT_P (XEXP (a, 1))
2911 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2912 != (~(HOST_WIDE_INT_1 << bitnum) & GET_MODE_MASK (mode)))
2913 return FALSE;
2915 /* if ((x & C) == 0) x &= ~C; is transformed to nothing. */
2916 /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C. */
2917 result = (code == EQ) ? a : NULL_RTX;
2919 else
2920 return FALSE;
2922 if (result)
2924 start_sequence ();
2925 noce_emit_move_insn (x, result);
2926 seq = end_ifcvt_sequence (if_info);
2927 if (!seq)
2928 return FALSE;
2930 emit_insn_before_setloc (seq, if_info->jump,
2931 INSN_LOCATION (if_info->insn_a));
2933 if_info->transform_name = "noce_try_bitop";
2934 return TRUE;
2938 /* Similar to get_condition, only the resulting condition must be
2939 valid at JUMP, instead of at EARLIEST.
2941 If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
2942 THEN block of the caller, and we have to reverse the condition. */
2944 static rtx
2945 noce_get_condition (rtx_insn *jump, rtx_insn **earliest, bool then_else_reversed)
2947 rtx cond, set, tmp;
2948 bool reverse;
2950 if (! any_condjump_p (jump))
2951 return NULL_RTX;
2953 set = pc_set (jump);
2955 /* If this branches to JUMP_LABEL when the condition is false,
2956 reverse the condition. */
2957 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
2958 && label_ref_label (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump));
2960 /* We may have to reverse because the caller's if block is not canonical,
2961 i.e. the THEN block isn't the fallthrough block for the TEST block
2962 (see find_if_header). */
2963 if (then_else_reversed)
2964 reverse = !reverse;
2966 /* If the condition variable is a register and is MODE_INT, accept it. */
2968 cond = XEXP (SET_SRC (set), 0);
2969 tmp = XEXP (cond, 0);
2970 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT
2971 && (GET_MODE (tmp) != BImode
2972 || !targetm.small_register_classes_for_mode_p (BImode)))
2974 *earliest = jump;
2976 if (reverse)
2977 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2978 GET_MODE (cond), tmp, XEXP (cond, 1));
2979 return cond;
2982 /* Otherwise, fall back on canonicalize_condition to do the dirty
2983 work of manipulating MODE_CC values and COMPARE rtx codes. */
2984 tmp = canonicalize_condition (jump, cond, reverse, earliest,
2985 NULL_RTX, have_cbranchcc4, true);
2987 /* We don't handle side-effects in the condition, like handling
2988 REG_INC notes and making sure no duplicate conditions are emitted. */
2989 if (tmp != NULL_RTX && side_effects_p (tmp))
2990 return NULL_RTX;
2992 return tmp;
2995 /* Return true if OP is ok for if-then-else processing. */
2997 static int
2998 noce_operand_ok (const_rtx op)
3000 if (side_effects_p (op))
3001 return FALSE;
3003 /* We special-case memories, so handle any of them with
3004 no address side effects. */
3005 if (MEM_P (op))
3006 return ! side_effects_p (XEXP (op, 0));
3008 return ! may_trap_p (op);
3011 /* Return true if X contains a MEM subrtx. */
3013 static bool
3014 contains_mem_rtx_p (rtx x)
3016 subrtx_iterator::array_type array;
3017 FOR_EACH_SUBRTX (iter, array, x, ALL)
3018 if (MEM_P (*iter))
3019 return true;
3021 return false;
3024 /* Return true iff basic block TEST_BB is valid for noce if-conversion.
3025 The condition used in this if-conversion is in COND.
3026 In practice, check that TEST_BB ends with a single set
3027 x := a and all previous computations
3028 in TEST_BB don't produce any values that are live after TEST_BB.
3029 In other words, all the insns in TEST_BB are there only
3030 to compute a value for x. Add the rtx cost of the insns
3031 in TEST_BB to COST. Record whether TEST_BB is a single simple
3032 set instruction in SIMPLE_P. */
3034 static bool
3035 bb_valid_for_noce_process_p (basic_block test_bb, rtx cond,
3036 unsigned int *cost, bool *simple_p)
3038 if (!test_bb)
3039 return false;
3041 rtx_insn *last_insn = last_active_insn (test_bb, FALSE);
3042 rtx last_set = NULL_RTX;
3044 rtx cc = cc_in_cond (cond);
3046 if (!insn_valid_noce_process_p (last_insn, cc))
3047 return false;
3048 last_set = single_set (last_insn);
3050 rtx x = SET_DEST (last_set);
3051 rtx_insn *first_insn = first_active_insn (test_bb);
3052 rtx first_set = single_set (first_insn);
3054 if (!first_set)
3055 return false;
3057 /* We have a single simple set, that's okay. */
3058 bool speed_p = optimize_bb_for_speed_p (test_bb);
3060 if (first_insn == last_insn)
3062 *simple_p = noce_operand_ok (SET_DEST (first_set));
3063 *cost += insn_rtx_cost (first_set, speed_p);
3064 return *simple_p;
3067 rtx_insn *prev_last_insn = PREV_INSN (last_insn);
3068 gcc_assert (prev_last_insn);
3070 /* For now, disallow setting x multiple times in test_bb. */
3071 if (REG_P (x) && reg_set_between_p (x, first_insn, prev_last_insn))
3072 return false;
3074 bitmap test_bb_temps = BITMAP_ALLOC (&reg_obstack);
3076 /* The regs that are live out of test_bb. */
3077 bitmap test_bb_live_out = df_get_live_out (test_bb);
3079 int potential_cost = insn_rtx_cost (last_set, speed_p);
3080 rtx_insn *insn;
3081 FOR_BB_INSNS (test_bb, insn)
3083 if (insn != last_insn)
3085 if (!active_insn_p (insn))
3086 continue;
3088 if (!insn_valid_noce_process_p (insn, cc))
3089 goto free_bitmap_and_fail;
3091 rtx sset = single_set (insn);
3092 gcc_assert (sset);
3094 if (contains_mem_rtx_p (SET_SRC (sset))
3095 || !REG_P (SET_DEST (sset))
3096 || reg_overlap_mentioned_p (SET_DEST (sset), cond))
3097 goto free_bitmap_and_fail;
3099 potential_cost += insn_rtx_cost (sset, speed_p);
3100 bitmap_set_bit (test_bb_temps, REGNO (SET_DEST (sset)));
3104 /* If any of the intermediate results in test_bb are live after test_bb
3105 then fail. */
3106 if (bitmap_intersect_p (test_bb_live_out, test_bb_temps))
3107 goto free_bitmap_and_fail;
3109 BITMAP_FREE (test_bb_temps);
3110 *cost += potential_cost;
3111 *simple_p = false;
3112 return true;
3114 free_bitmap_and_fail:
3115 BITMAP_FREE (test_bb_temps);
3116 return false;
3119 /* We have something like:
3121 if (x > y)
3122 { i = a; j = b; k = c; }
3124 Make it:
3126 tmp_i = (x > y) ? a : i;
3127 tmp_j = (x > y) ? b : j;
3128 tmp_k = (x > y) ? c : k;
3129 i = tmp_i;
3130 j = tmp_j;
3131 k = tmp_k;
3133 Subsequent passes are expected to clean up the extra moves.
3135 Look for special cases such as writes to one register which are
3136 read back in another SET, as might occur in a swap idiom or
3137 similar.
3139 These look like:
3141 if (x > y)
3142 i = a;
3143 j = i;
3145 Which we want to rewrite to:
3147 tmp_i = (x > y) ? a : i;
3148 tmp_j = (x > y) ? tmp_i : j;
3149 i = tmp_i;
3150 j = tmp_j;
3152 We can catch these when looking at (SET x y) by keeping a list of the
3153 registers we would have targeted before if-conversion and looking back
3154 through it for an overlap with Y. If we find one, we rewire the
3155 conditional set to use the temporary we introduced earlier.
3157 IF_INFO contains the useful information about the block structure and
3158 jump instructions. */
3160 static int
3161 noce_convert_multiple_sets (struct noce_if_info *if_info)
3163 basic_block test_bb = if_info->test_bb;
3164 basic_block then_bb = if_info->then_bb;
3165 basic_block join_bb = if_info->join_bb;
3166 rtx_insn *jump = if_info->jump;
3167 rtx_insn *cond_earliest;
3168 rtx_insn *insn;
3170 start_sequence ();
3172 /* Decompose the condition attached to the jump. */
/* NOTE(review): COND is assumed to be non-null here; callers reach this
   only for a valid conditional jump -- confirm against the caller.  */
3173 rtx cond = noce_get_condition (jump, &cond_earliest, false);
3174 rtx x = XEXP (cond, 0);
3175 rtx y = XEXP (cond, 1);
3176 rtx_code cond_code = GET_CODE (cond);
3178 /* The true targets for a conditional move. */
3179 auto_vec<rtx> targets;
3180 /* The temporaries introduced to allow us to not consider register
3181 overlap. */
3182 auto_vec<rtx> temporaries;
3184 /* The insns we've emitted. */
3184 auto_vec<rtx_insn *> unmodified_insns;
3185 int count = 0;
3187 FOR_BB_INSNS (then_bb, insn)
3189 /* Skip over non-insns. */
3190 if (!active_insn_p (insn))
3191 continue;
3193 rtx set = single_set (insn);
3194 gcc_checking_assert (set);
3196 rtx target = SET_DEST (set);
3197 rtx temp = gen_reg_rtx (GET_MODE (target));
3198 rtx new_val = SET_SRC (set);
3199 rtx old_val = target;
3201 /* If we were supposed to read from an earlier write in this block,
3202 we've changed the register allocation. Rewire the read. While
3203 we are looking, also try to catch a swap idiom. */
3204 for (int i = count - 1; i >= 0; --i)
3205 if (reg_overlap_mentioned_p (new_val, targets[i]))
3207 /* Catch a "swap" style idiom. */
3208 if (find_reg_note (insn, REG_DEAD, new_val) != NULL_RTX)
3209 /* The write to targets[i] is only live until the read
3210 here. As the condition codes match, we can propagate
3211 the set to here. */
3212 new_val = SET_SRC (single_set (unmodified_insns[i]));
3213 else
3214 new_val = temporaries[i];
3215 break;
3218 /* If we had a non-canonical conditional jump (i.e. one where
3219 the fallthrough is to the "else" case) we need to reverse
3220 the conditional select. */
3221 if (if_info->then_else_reversed)
3222 std::swap (old_val, new_val);
3225 /* We allow simple lowpart register subreg SET sources in
3226 bb_ok_for_noce_convert_multiple_sets. Be careful when processing
3227 sequences like:
3228 (set (reg:SI r1) (reg:SI r2))
3229 (set (reg:HI r3) (subreg:HI (r1)))
3230 For the second insn new_val or old_val (r1 in this example) will be
3231 taken from the temporaries and have the wider mode which will not
3232 match with the mode of the other source of the conditional move, so
3233 we'll end up trying to emit r4:HI = cond ? (r1:SI) : (r3:HI).
3234 Wrap the two cmove operands into subregs if appropriate to prevent
3235 that. */
3236 if (GET_MODE (new_val) != GET_MODE (temp))
3238 machine_mode src_mode = GET_MODE (new_val);
3239 machine_mode dst_mode = GET_MODE (temp);
3240 if (GET_MODE_SIZE (src_mode) <= GET_MODE_SIZE (dst_mode))
3242 end_sequence ();
3243 return FALSE;
3245 new_val = lowpart_subreg (dst_mode, new_val, src_mode);
3247 if (GET_MODE (old_val) != GET_MODE (temp))
3249 machine_mode src_mode = GET_MODE (old_val);
3250 machine_mode dst_mode = GET_MODE (temp);
3251 if (GET_MODE_SIZE (src_mode) <= GET_MODE_SIZE (dst_mode))
3253 end_sequence ();
3254 return FALSE;
3256 old_val = lowpart_subreg (dst_mode, old_val, src_mode);
3259 /* Actually emit the conditional move. */
3260 rtx temp_dest = noce_emit_cmove (if_info, temp, cond_code,
3261 x, y, new_val, old_val);
3263 /* If we failed to expand the conditional move, drop out and don't
3264 try to continue. */
3265 if (temp_dest == NULL_RTX)
3267 end_sequence ();
3268 return FALSE;
3271 /* Bookkeeping. */
3272 count++;
3273 targets.safe_push (target);
3274 temporaries.safe_push (temp_dest);
3275 unmodified_insns.safe_push (insn);
3278 /* We must have seen some sort of insn to insert, otherwise we were
3279 given an empty BB to convert, and we can't handle that. */
3280 gcc_assert (!unmodified_insns.is_empty ());
3282 /* Now fixup the assignments. */
3283 for (int i = 0; i < count; i++)
3284 noce_emit_move_insn (targets[i], temporaries[i]);
3286 /* Actually emit the sequence if it isn't too expensive. */
/* SEQ is the insn chain of the still-open sequence.  */
3287 rtx_insn *seq = get_insns ();
3289 if (!noce_conversion_profitable_p (seq, if_info))
3291 end_sequence ();
3292 return FALSE;
3295 for (insn = seq; insn; insn = NEXT_INSN (insn))
3296 set_used_flags (insn);
3298 /* Mark all our temporaries and targets as used. */
3299 for (int i = 0; i < count; i++)
3301 set_used_flags (temporaries[i]);
3302 set_used_flags (targets[i]);
3305 set_used_flags (cond);
3306 set_used_flags (x);
3307 set_used_flags (y);
3309 unshare_all_rtl_in_chain (seq);
3310 end_sequence ();
3312 if (!seq)
3313 return FALSE;
/* Give up if any emitted insn is a jump or fails to match an insn
   pattern of the target.  */
3315 for (insn = seq; insn; insn = NEXT_INSN (insn))
3316 if (JUMP_P (insn)
3317 || recog_memoized (insn) == -1)
3318 return FALSE;
3320 emit_insn_before_setloc (seq, if_info->jump,
3321 INSN_LOCATION (unmodified_insns.last ()));
3323 /* Clean up THEN_BB and the edges in and out of it. */
3324 remove_edge (find_edge (test_bb, join_bb));
3325 remove_edge (find_edge (then_bb, join_bb));
3326 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
3327 delete_basic_block (then_bb);
3328 num_true_changes++;
3330 /* Maybe merge blocks now the jump is simple enough. */
3331 if (can_merge_blocks_p (test_bb, join_bb))
3333 merge_blocks (test_bb, join_bb);
3334 num_true_changes++;
3337 num_updated_if_blocks++;
3338 if_info->transform_name = "noce_convert_multiple_sets";
3339 return TRUE;
3342 /* Return true iff basic block TEST_BB is comprised of only
3343 (SET (REG) (REG)) insns suitable for conversion to a series
3344 of conditional moves. Also check that we have more than one set
3345 (other routines can handle a single set better than we would), and
3346 fewer than PARAM_MAX_RTL_IF_CONVERSION_INSNS sets. */
3348 static bool
3349 bb_ok_for_noce_convert_multiple_sets (basic_block test_bb)
3351 rtx_insn *insn;
3352 unsigned count = 0;
3353 unsigned param = PARAM_VALUE (PARAM_MAX_RTL_IF_CONVERSION_INSNS);
3355 FOR_BB_INSNS (test_bb, insn)
3357 /* Skip over notes etc. */
3358 if (!active_insn_p (insn))
3359 continue;
3361 /* We only handle SET insns. */
3362 rtx set = single_set (insn);
3363 if (set == NULL_RTX)
3364 return false;
3366 rtx dest = SET_DEST (set);
3367 rtx src = SET_SRC (set);
3369 /* We can possibly relax this, but for now only handle REG to REG
3370 (including subreg) moves. This avoids any issues that might come
3371 from introducing loads/stores that might violate data-race-freedom
3372 guarantees. */
3373 if (!REG_P (dest))
3374 return false;
3376 if (!(REG_P (src)
3377 || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
3378 && subreg_lowpart_p (src))))
3379 return false;
3381 /* Destination must be appropriate for a conditional write. */
3382 if (!noce_operand_ok (dest))
3383 return false;
3385 /* We must be able to conditionally move in this mode. */
3386 if (!can_conditionally_move_p (GET_MODE (dest)))
3387 return false;
3389 count++;
3392 /* If we would only put out one conditional move, the other strategies
3393 this pass tries are better optimized and will be more appropriate.
3394 Some targets want to strictly limit the number of conditional moves
3395 that are emitted, they set this through PARAM, we need to respect
3396 that. */
3397 return count > 1 && count <= param;
3400 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
3401    it without using conditional execution.  Return TRUE if we were successful
3402    at converting the block.  */
3404 static int
3405 noce_process_if_block (struct noce_if_info *if_info)
3407   basic_block test_bb = if_info->test_bb;	/* test block */
3408   basic_block then_bb = if_info->then_bb;	/* THEN */
3409   basic_block else_bb = if_info->else_bb;	/* ELSE or NULL */
3410   basic_block join_bb = if_info->join_bb;	/* JOIN */
3411   rtx_insn *jump = if_info->jump;
3412   rtx cond = if_info->cond;
3413   rtx_insn *insn_a, *insn_b;
3414   rtx set_a, set_b;
3415   rtx orig_x, x, a, b;
3417   /* We're looking for patterns of the form
3419      (1) if (...) x = a; else x = b;
3420      (2) x = b; if (...) x = a;
3421      (3) if (...) x = a;   // as if with an initial x = x.
3422      (4) if (...) { x = a; y = b; z = c; }  // Like 3, for multiple SETS.
3423      The later patterns require jumps to be more expensive.
3424      For the if (...) x = a; else x = b; case we allow multiple insns
3425      inside the then and else blocks as long as their only effect is
3426      to calculate a value for x.
3427      ??? For future expansion, further expand the "multiple X" rules.  */
3429   /* First look for multiple SETS.  */
3430   if (!else_bb
3431       && HAVE_conditional_move
3432       && !HAVE_cc0
3433       && bb_ok_for_noce_convert_multiple_sets (then_bb))
3435       if (noce_convert_multiple_sets (if_info))
3437 	  if (dump_file && if_info->transform_name)
3438 	    fprintf (dump_file, "if-conversion succeeded through %s\n",
3439 		     if_info->transform_name);
3440 	  return TRUE;
3444   if (! bb_valid_for_noce_process_p (then_bb, cond, &if_info->original_cost,
3445 				     &if_info->then_simple))
3446     return false;
3448   if (else_bb
3449       && ! bb_valid_for_noce_process_p (else_bb, cond, &if_info->original_cost,
3450 				      &if_info->else_simple))
3451     return false;
  /* THEN was validated by bb_valid_for_noce_process_p above, so its last
     active insn is guaranteed to be a single SET (asserted below).  */
3453   insn_a = last_active_insn (then_bb, FALSE);
3454   set_a = single_set (insn_a);
3455   gcc_assert (set_a);
3457   x = SET_DEST (set_a);
3458   a = SET_SRC (set_a);
3460   /* Look for the other potential set.  Make sure we've got equivalent
3461      destinations.  */
3462   /* ??? This is overconservative.  Storing to two different mems is
3463      as easy as conditionally computing the address.  Storing to a
3464      single mem merely requires a scratch memory to use as one of the
3465      destination addresses; often the memory immediately below the
3466      stack pointer is available for this.  */
3467   set_b = NULL_RTX;
3468   if (else_bb)
3470       insn_b = last_active_insn (else_bb, FALSE);
3471       set_b = single_set (insn_b);
3472       gcc_assert (set_b);
3474       if (!rtx_interchangeable_p (x, SET_DEST (set_b)))
3475 	return FALSE;
3477   else
3479       insn_b = prev_nonnote_nondebug_insn (if_info->cond_earliest);
3480       /* We're going to be moving the evaluation of B down from above
3481 	 COND_EARLIEST to JUMP.  Make sure the relevant data is still
3482 	 intact.  */
3483       if (! insn_b
3484 	  || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
3485 	  || !NONJUMP_INSN_P (insn_b)
3486 	  || (set_b = single_set (insn_b)) == NULL_RTX
3487 	  || ! rtx_interchangeable_p (x, SET_DEST (set_b))
3488 	  || ! noce_operand_ok (SET_SRC (set_b))
3489 	  || reg_overlap_mentioned_p (x, SET_SRC (set_b))
3490 	  || modified_between_p (SET_SRC (set_b), insn_b, jump)
3491 	  /* Avoid extending the lifetime of hard registers on small
3492 	     register class machines.  */
3493 	  || (REG_P (SET_SRC (set_b))
3494 	      && HARD_REGISTER_P (SET_SRC (set_b))
3495 	      && targetm.small_register_classes_for_mode_p
3496 		   (GET_MODE (SET_SRC (set_b))))
3497 	  /* Likewise with X.  In particular this can happen when
3498 	     noce_get_condition looks farther back in the instruction
3499 	     stream than one might expect.  */
3500 	  || reg_overlap_mentioned_p (x, cond)
3501 	  || reg_overlap_mentioned_p (x, a)
3502 	  || modified_between_p (x, insn_b, jump))
3504 	  insn_b = NULL;
3505 	  set_b = NULL_RTX;
3509   /* If x has side effects then only the if-then-else form is safe to
3510      convert.  But even in that case we would need to restore any notes
3511      (such as REG_INC) at then end.  That can be tricky if
3512      noce_emit_move_insn expands to more than one insn, so disable the
3513      optimization entirely for now if there are side effects.  */
3514   if (side_effects_p (x))
3515     return FALSE;
3517   b = (set_b ? SET_SRC (set_b) : x);
3519   /* Only operate on register destinations, and even then avoid extending
3520      the lifetime of hard registers on small register class machines.  */
3521   orig_x = x;
3522   if_info->orig_x = orig_x;
3523   if (!REG_P (x)
3524       || (HARD_REGISTER_P (x)
3525 	  && targetm.small_register_classes_for_mode_p (GET_MODE (x))))
3527       if (GET_MODE (x) == BLKmode)
3528 	return FALSE;
3530       if (GET_CODE (x) == ZERO_EXTRACT
3531 	  && (!CONST_INT_P (XEXP (x, 1))
3532 	      || !CONST_INT_P (XEXP (x, 2))))
3533 	return FALSE;
3535       x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
3536 				 ? XEXP (x, 0) : x));
3539   /* Don't operate on sources that may trap or are volatile.  */
3540   if (! noce_operand_ok (a) || ! noce_operand_ok (b))
3541     return FALSE;
3543  retry:
3544   /* Set up the info block for our subroutines.  */
3545   if_info->insn_a = insn_a;
3546   if_info->insn_b = insn_b;
3547   if_info->x = x;
3548   if_info->a = a;
3549   if_info->b = b;
3551   /* Try optimizations in some approximation of a useful order.  */
3552   /* ??? Should first look to see if X is live incoming at all.  If it
3553      isn't, we don't need anything but an unconditional set.  */
3555   /* Look and see if A and B are really the same.  Avoid creating silly
3556      cmove constructs that no one will fix up later.  */
3557   if (noce_simple_bbs (if_info)
3558       && rtx_interchangeable_p (a, b))
3560       /* If we have an INSN_B, we don't have to create any new rtl.  Just
3561 	 move the instruction that we already have.  If we don't have an
3562 	 INSN_B, that means that A == X, and we've got a noop move.  In
3563 	 that case don't do anything and let the code below delete INSN_A.  */
3564       if (insn_b && else_bb)
3566 	  rtx note;
3568 	  if (else_bb && insn_b == BB_END (else_bb))
3569 	    BB_END (else_bb) = PREV_INSN (insn_b);
3570 	  reorder_insns (insn_b, insn_b, PREV_INSN (jump));
3572 	  /* If there was a REG_EQUAL note, delete it since it may have been
3573 	     true due to this insn being after a jump.  */
3574 	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
3575 	    remove_note (insn_b, note);
3577 	  insn_b = NULL;
3579       /* If we have "x = b; if (...) x = a;", and x has side-effects, then
3580 	 x must be executed twice.  */
3581       else if (insn_b && side_effects_p (orig_x))
3582 	return FALSE;
3584       x = orig_x;
3585       goto success;
3588   if (!set_b && MEM_P (orig_x))
3589     /* We want to avoid store speculation to avoid cases like
3590 	 if (pthread_mutex_trylock(mutex))
3591 	   ++global_variable;
3592        Rather than go to much effort here, we rely on the SSA optimizers,
3593        which do a good enough job these days.  */
3594     return FALSE;
3596   if (noce_try_move (if_info))
3597     goto success;
3598   if (noce_try_ifelse_collapse (if_info))
3599     goto success;
3600   if (noce_try_store_flag (if_info))
3601     goto success;
3602   if (noce_try_bitop (if_info))
3603     goto success;
3604   if (noce_try_minmax (if_info))
3605     goto success;
3606   if (noce_try_abs (if_info))
3607     goto success;
3608   if (noce_try_inverse_constants (if_info))
3609     goto success;
3610   if (!targetm.have_conditional_execution ()
3611       && noce_try_store_flag_constants (if_info))
3612     goto success;
3613   if (HAVE_conditional_move
3614       && noce_try_cmove (if_info))
3615     goto success;
3616   if (! targetm.have_conditional_execution ())
3618       if (noce_try_addcc (if_info))
3619 	goto success;
3620       if (noce_try_store_flag_mask (if_info))
3621 	goto success;
3622       if (HAVE_conditional_move
3623 	  && noce_try_cmove_arith (if_info))
3624 	goto success;
3625       if (noce_try_sign_mask (if_info))
3626 	goto success;
  /* Pattern (2) "x = b; if (...) x = a;" failed every transform above;
     drop B and retry as the simpler pattern (3) "if (...) x = a;".  */
3629   if (!else_bb && set_b)
3631       insn_b = NULL;
3632       set_b = NULL_RTX;
3633       b = orig_x;
3634       goto retry;
3637   return FALSE;
3639  success:
3640   if (dump_file && if_info->transform_name)
3641     fprintf (dump_file, "if-conversion succeeded through %s\n",
3642 	     if_info->transform_name);
3644   /* If we used a temporary, fix it up now.  */
3645   if (orig_x != x)
3647       rtx_insn *seq;
3649       start_sequence ();
3650       noce_emit_move_insn (orig_x, x);
3651       seq = get_insns ();
3652       set_used_flags (orig_x);
3653       unshare_all_rtl_in_chain (seq);
3654       end_sequence ();
3656       emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATION (insn_a));
3659   /* The original THEN and ELSE blocks may now be removed.  The test block
3660      must now jump to the join block.  If the test block and the join block
3661      can be merged, do so.  */
3662   if (else_bb)
3664       delete_basic_block (else_bb);
3665       num_true_changes++;
3667   else
3668     remove_edge (find_edge (test_bb, join_bb));
3670   remove_edge (find_edge (then_bb, join_bb));
3671   redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
3672   delete_basic_block (then_bb);
3673   num_true_changes++;
3675   if (can_merge_blocks_p (test_bb, join_bb))
3677       merge_blocks (test_bb, join_bb);
3678       num_true_changes++;
3681   num_updated_if_blocks++;
3682   return TRUE;
3685 /* Check whether a block is suitable for conditional move conversion.
3686    Every insn must be a simple set of a register to a constant or a
3687    register.  For each assignment, store the value in the pointer map
3688    VALS, keyed indexed by register pointer, then store the register
3689    pointer in REGS.  COND is the condition we will test.  */
3691 static int
3692 check_cond_move_block (basic_block bb,
3693 		       hash_map<rtx, rtx> *vals,
3694 		       vec<rtx> *regs,
3695 		       rtx cond)
3697   rtx_insn *insn;
3698   rtx cc = cc_in_cond (cond);
3700   /* We can only handle simple jumps at the end of the basic block.
3701      It is almost impossible to update the CFG otherwise.  */
3702   insn = BB_END (bb);
3703   if (JUMP_P (insn) && !onlyjump_p (insn))
3704     return FALSE;
3706   FOR_BB_INSNS (bb, insn)
3708       rtx set, dest, src;
3710       if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
3711 	continue;
3712       set = single_set (insn);
3713       if (!set)
3714 	return FALSE;
3716       dest = SET_DEST (set);
3717       src = SET_SRC (set);
3718       if (!REG_P (dest)
3719 	  || (HARD_REGISTER_P (dest)
3720 	      && targetm.small_register_classes_for_mode_p (GET_MODE (dest))))
3721 	return FALSE;
3723       if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
3724 	return FALSE;
3726       if (side_effects_p (src) || side_effects_p (dest))
3727 	return FALSE;
3729       if (may_trap_p (src) || may_trap_p (dest))
3730 	return FALSE;
3732       /* Don't try to handle this if the source register was
3733 	 modified earlier in the block.  */
3734       if ((REG_P (src)
3735 	   && vals->get (src))
3736 	  || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
3737 	      && vals->get (SUBREG_REG (src))))
3738 	return FALSE;
3740       /* Don't try to handle this if the destination register was
3741 	 modified earlier in the block.  */
3742       if (vals->get (dest))
3743 	return FALSE;
3745       /* Don't try to handle this if the condition uses the
3746 	 destination register.  */
3747       if (reg_overlap_mentioned_p (dest, cond))
3748 	return FALSE;
3750       /* Don't try to handle this if the source register is modified
3751 	 later in the block.  */
3752       if (!CONSTANT_P (src)
3753 	  && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
3754 	return FALSE;
3756       /* Skip it if the instruction to be moved might clobber CC.  */
3757       if (cc && set_of (cc, insn))
3758 	return FALSE;
      /* Insn passed all checks: record DEST -> SRC in the value map and
	 remember DEST so callers can walk the sets in block order.  */
3760       vals->put (dest, src);
3762       regs->safe_push (dest);
3765   return TRUE;
3768 /* Given a basic block BB suitable for conditional move conversion,
3769    a condition COND, and pointer maps THEN_VALS and ELSE_VALS containing
3770    the register values depending on COND, emit the insns in the block as
3771    conditional moves.  If ELSE_BLOCK is true, THEN_BB was already
3772    processed.  The caller has started a sequence for the conversion.
3773    Return true if successful, false if something goes wrong.  */
3775 static bool
3776 cond_move_convert_if_block (struct noce_if_info *if_infop,
3777 			    basic_block bb, rtx cond,
3778 			    hash_map<rtx, rtx> *then_vals,
3779 			    hash_map<rtx, rtx> *else_vals,
3780 			    bool else_block_p)
3782   enum rtx_code code;
3783   rtx_insn *insn;
3784   rtx cond_arg0, cond_arg1;
3786   code = GET_CODE (cond);
3787   cond_arg0 = XEXP (cond, 0);
3788   cond_arg1 = XEXP (cond, 1);
3790   FOR_BB_INSNS (bb, insn)
3792       rtx set, target, dest, t, e;
3794       /* ??? Maybe emit conditional debug insn?  */
3795       if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
3796 	continue;
3797       set = single_set (insn);
3798       gcc_assert (set && REG_P (SET_DEST (set)));
3800       dest = SET_DEST (set);
      /* Look up the values DEST receives in the THEN and ELSE blocks;
	 either may be absent if DEST is only set on one side.  */
3802       rtx *then_slot = then_vals->get (dest);
3803       rtx *else_slot = else_vals->get (dest);
3804       t = then_slot ? *then_slot : NULL_RTX;
3805       e = else_slot ? *else_slot : NULL_RTX;
3807       if (else_block_p)
3809 	  /* If this register was set in the then block, we already
3810 	     handled this case there.  */
3811 	  if (t)
3812 	    continue;
3813 	  t = dest;
3814 	  gcc_assert (e);
3816       else
3818 	  gcc_assert (t);
3819 	  if (!e)
3820 	    e = dest;
3823       target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
3824 				t, e);
3825       if (!target)
3826 	return false;
3828       if (target != dest)
3829 	noce_emit_move_insn (dest, target);
3832   return true;
3835 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
3836    it using only conditional moves.  Return TRUE if we were successful at
3837    converting the block.  */
3839 static int
3840 cond_move_process_if_block (struct noce_if_info *if_info)
3842   basic_block test_bb = if_info->test_bb;
3843   basic_block then_bb = if_info->then_bb;
3844   basic_block else_bb = if_info->else_bb;
3845   basic_block join_bb = if_info->join_bb;
3846   rtx_insn *jump = if_info->jump;
3847   rtx cond = if_info->cond;
3848   rtx_insn *seq, *loc_insn;
3849   rtx reg;
3850   int c;
3851   vec<rtx> then_regs = vNULL;
3852   vec<rtx> else_regs = vNULL;
3853   unsigned int i;
3854   int success_p = FALSE;
3855   int limit = PARAM_VALUE (PARAM_MAX_RTL_IF_CONVERSION_INSNS);
3857   /* Build a mapping for each block to the value used for each
3858      register.  */
3859   hash_map<rtx, rtx> then_vals;
3860   hash_map<rtx, rtx> else_vals;
3862   /* Make sure the blocks are suitable.  */
3863   if (!check_cond_move_block (then_bb, &then_vals, &then_regs, cond)
3864       || (else_bb
3865 	  && !check_cond_move_block (else_bb, &else_vals, &else_regs, cond)))
3866     goto done;
3868   /* Make sure the blocks can be used together.  If the same register
3869      is set in both blocks, and is not set to a constant in both
3870      cases, then both blocks must set it to the same register.  We
3871      have already verified that if it is set to a register, that the
3872      source register does not change after the assignment.  Also count
3873      the number of registers set in only one of the blocks.  */
3874   c = 0;
3875   FOR_EACH_VEC_ELT (then_regs, i, reg)
3877       rtx *then_slot = then_vals.get (reg);
3878       rtx *else_slot = else_vals.get (reg);
3880       gcc_checking_assert (then_slot);
3881       if (!else_slot)
3882 	++c;
3883       else
3885 	  rtx then_val = *then_slot;
3886 	  rtx else_val = *else_slot;
3887 	  if (!CONSTANT_P (then_val) && !CONSTANT_P (else_val)
3888 	      && !rtx_equal_p (then_val, else_val))
3889 	    goto done;
3893   /* Finish off c for MAX_CONDITIONAL_EXECUTE.  */
3894   FOR_EACH_VEC_ELT (else_regs, i, reg)
3896       gcc_checking_assert (else_vals.get (reg));
3897       if (!then_vals.get (reg))
3898 	++c;
3901   /* Make sure it is reasonable to convert this block.  What matters
3902      is the number of assignments currently made in only one of the
3903      branches, since if we convert we are going to always execute
3904      them.  */
3905   if (c > MAX_CONDITIONAL_EXECUTE
3906       || c > limit)
3907     goto done;
3909   /* Try to emit the conditional moves.  First do the then block,
3910      then do anything left in the else blocks.  */
3911   start_sequence ();
3912   if (!cond_move_convert_if_block (if_info, then_bb, cond,
3913 				   &then_vals, &else_vals, false)
3914       || (else_bb
3915 	  && !cond_move_convert_if_block (if_info, else_bb, cond,
3916 					  &then_vals, &else_vals, true)))
3918       end_sequence ();
3919       goto done;
3921   seq = end_ifcvt_sequence (if_info);
3922   if (!seq)
3923     goto done;
  /* Pick a source location for the emitted sequence from the first
     active insn of the THEN (or failing that, ELSE) block.  */
3925   loc_insn = first_active_insn (then_bb);
3926   if (!loc_insn)
3928       loc_insn = first_active_insn (else_bb);
3929       gcc_assert (loc_insn);
3931   emit_insn_before_setloc (seq, jump, INSN_LOCATION (loc_insn));
3933   if (else_bb)
3935       delete_basic_block (else_bb);
3936       num_true_changes++;
3938   else
3939     remove_edge (find_edge (test_bb, join_bb));
3941   remove_edge (find_edge (then_bb, join_bb));
3942   redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
3943   delete_basic_block (then_bb);
3944   num_true_changes++;
3946   if (can_merge_blocks_p (test_bb, join_bb))
3948       merge_blocks (test_bb, join_bb);
3949       num_true_changes++;
3952   num_updated_if_blocks++;
3953   success_p = TRUE;
3955 done:
3956   then_regs.release ();
3957   else_regs.release ();
3958   return success_p;
3962 /* Determine if a given basic block heads a simple IF-THEN-JOIN or an
3963    IF-THEN-ELSE-JOIN block.
3965    If so, we'll try to convert the insns to not require the branch,
3966    using only transformations that do not require conditional execution.
3968    Return TRUE if we were successful at converting the block.  */
3970 static int
3971 noce_find_if_block (basic_block test_bb, edge then_edge, edge else_edge,
3972 		    int pass)
3974   basic_block then_bb, else_bb, join_bb;
3975   bool then_else_reversed = false;
3976   rtx_insn *jump;
3977   rtx cond;
3978   rtx_insn *cond_earliest;
3979   struct noce_if_info if_info;
  /* Whether TEST_BB should be optimized for speed rather than size.  */
3980   bool speed_p = optimize_bb_for_speed_p (test_bb);
3982   /* We only ever should get here before reload.  */
3983   gcc_assert (!reload_completed);
3985   /* Recognize an IF-THEN-ELSE-JOIN block.  */
3986   if (single_pred_p (then_edge->dest)
3987       && single_succ_p (then_edge->dest)
3988       && single_pred_p (else_edge->dest)
3989       && single_succ_p (else_edge->dest)
3990       && single_succ (then_edge->dest) == single_succ (else_edge->dest))
3992       then_bb = then_edge->dest;
3993       else_bb = else_edge->dest;
3994       join_bb  = single_succ (then_bb);
3996   /* Recognize an IF-THEN-JOIN block.  */
3997   else if (single_pred_p (then_edge->dest)
3998 	   && single_succ_p (then_edge->dest)
3999 	   && single_succ (then_edge->dest) == else_edge->dest)
4001       then_bb = then_edge->dest;
4002       else_bb = NULL_BLOCK;
4003       join_bb  = else_edge->dest;
4005   /* Recognize an IF-ELSE-JOIN block.  We can have those because the order
4006      of basic blocks in cfglayout mode does not matter, so the fallthrough
4007      edge can go to any basic block (and not just to bb->next_bb, like in
4008      cfgrtl mode).  */
4009   else if (single_pred_p (else_edge->dest)
4010 	   && single_succ_p (else_edge->dest)
4011 	   && single_succ (else_edge->dest) == then_edge->dest)
4013       /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
4014 	 To make this work, we have to invert the THEN and ELSE blocks
4015 	 and reverse the jump condition.  */
4016       then_bb = else_edge->dest;
4017       else_bb = NULL_BLOCK;
4018       join_bb  = single_succ (then_bb);
4019       then_else_reversed = true;
4021   else
4022     /* Not a form we can handle.  */
4023     return FALSE;
4025   /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
4026   if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
4027     return FALSE;
4028   if (else_bb
4029       && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
4030     return FALSE;
4032   num_possible_if_blocks++;
4034   if (dump_file)
4036       fprintf (dump_file,
4037 	       "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
4038 	       (else_bb) ? "-ELSE" : "",
4039 	       pass, test_bb->index, then_bb->index);
4041       if (else_bb)
4042 	fprintf (dump_file, ", else %d", else_bb->index);
4044       fprintf (dump_file, ", join %d\n", join_bb->index);
4047   /* If the conditional jump is more than just a conditional
4048      jump, then we can not do if-conversion on this block.  */
4049   jump = BB_END (test_bb);
4050   if (! onlyjump_p (jump))
4051     return FALSE;
4053   /* If this is not a standard conditional jump, we can't parse it.  */
4054   cond = noce_get_condition (jump, &cond_earliest, then_else_reversed);
4055   if (!cond)
4056     return FALSE;
4058   /* We must be comparing objects whose modes imply the size.  */
4059   if (GET_MODE (XEXP (cond, 0)) == BLKmode)
4060     return FALSE;
4062   /* Initialize an IF_INFO struct to pass around.  */
4063   memset (&if_info, 0, sizeof if_info);
4064   if_info.test_bb = test_bb;
4065   if_info.then_bb = then_bb;
4066   if_info.else_bb = else_bb;
4067   if_info.join_bb = join_bb;
4068   if_info.cond = cond;
4069   if_info.cond_earliest = cond_earliest;
4070   if_info.jump = jump;
4071   if_info.then_else_reversed = then_else_reversed;
4072   if_info.speed_p = speed_p;
4073   if_info.max_seq_cost
4074     = targetm.max_noce_ifcvt_seq_cost (then_edge);
4075   /* We'll add in the cost of THEN_BB and ELSE_BB later, when we check
4076      that they are valid to transform.  We can't easily get back to the insn
4077      for COND (and it may not exist if we had to canonicalize to get COND),
4078      and jump_insns are always given a cost of 1 by seq_cost, so treat
4079      both instructions as having cost COSTS_N_INSNS (1).  */
4080   if_info.original_cost = COSTS_N_INSNS (2);
4083   /* Do the real work.  */
  /* Prefer the non-cmove transforms; fall back to pure conditional
     moves only if those fail.  */
4085   if (noce_process_if_block (&if_info))
4086     return TRUE;
4088   if (HAVE_conditional_move
4089       && cond_move_process_if_block (&if_info))
4090     return TRUE;
4092   return FALSE;
4096 /* Merge the blocks and mark for local life update.  */
4098 static void
4099 merge_if_block (struct ce_if_block * ce_info)
4101   basic_block test_bb = ce_info->test_bb;	/* last test block */
4102   basic_block then_bb = ce_info->then_bb;	/* THEN */
4103   basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
4104   basic_block join_bb = ce_info->join_bb;	/* join block */
  /* Block into which all the others are merged.  */
4105   basic_block combo_bb;
4107   /* All block merging is done into the lower block numbers.  */
4109   combo_bb = test_bb;
4110   df_set_bb_dirty (test_bb);
4112   /* Merge any basic blocks to handle && and || subtests.  Each of
4113      the blocks are on the fallthru path from the predecessor block.  */
4114   if (ce_info->num_multiple_test_blocks > 0)
4116       basic_block bb = test_bb;
4117       basic_block last_test_bb = ce_info->last_test_bb;
4118       basic_block fallthru = block_fallthru (bb);
4122 	  bb = fallthru;
4123 	  fallthru = block_fallthru (bb);
4124 	  merge_blocks (combo_bb, bb);
4125 	  num_true_changes++;
4127       while (bb != last_test_bb);
4130   /* Merge TEST block into THEN block.  Normally the THEN block won't have a
4131      label, but it might if there were || tests.  That label's count should be
4132      zero, and it normally should be removed.  */
4134   if (then_bb)
4136       /* If THEN_BB has no successors, then there's a BARRIER after it.
4137 	 If COMBO_BB has more than one successor (THEN_BB), then that BARRIER
4138 	 is no longer needed, and in fact it is incorrect to leave it in
4139 	 the insn stream.  */
4140       if (EDGE_COUNT (then_bb->succs) == 0
4141 	  && EDGE_COUNT (combo_bb->succs) > 1)
4143 	  rtx_insn *end = NEXT_INSN (BB_END (then_bb));
4144 	  while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
4145 	    end = NEXT_INSN (end);
4147 	  if (end && BARRIER_P (end))
4148 	    delete_insn (end);
4150       merge_blocks (combo_bb, then_bb);
4151       num_true_changes++;
4154   /* The ELSE block, if it existed, had a label.  That label count
4155      will almost always be zero, but odd things can happen when labels
4156      get their addresses taken.  */
4157   if (else_bb)
4159       /* If ELSE_BB has no successors, then there's a BARRIER after it.
4160 	 If COMBO_BB has more than one successor (ELSE_BB), then that BARRIER
4161 	 is no longer needed, and in fact it is incorrect to leave it in
4162 	 the insn stream.  */
4163       if (EDGE_COUNT (else_bb->succs) == 0
4164 	  && EDGE_COUNT (combo_bb->succs) > 1)
4166 	  rtx_insn *end = NEXT_INSN (BB_END (else_bb));
4167 	  while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
4168 	    end = NEXT_INSN (end);
4170 	  if (end && BARRIER_P (end))
4171 	    delete_insn (end);
4173       merge_blocks (combo_bb, else_bb);
4174       num_true_changes++;
4177   /* If there was no join block reported, that means it was not adjacent
4178      to the others, and so we cannot merge them.  */
4180   if (! join_bb)
4182       rtx_insn *last = BB_END (combo_bb);
4184       /* The outgoing edge for the current COMBO block should already
4185 	 be correct.  Verify this.  */
4186       if (EDGE_COUNT (combo_bb->succs) == 0)
4187 	gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
4188 		    || (NONJUMP_INSN_P (last)
4189 			&& GET_CODE (PATTERN (last)) == TRAP_IF
4190 			&& (TRAP_CONDITION (PATTERN (last))
4191 			    == const_true_rtx)));
4193       else
4194       /* There should still be something at the end of the THEN or ELSE
4195          blocks taking us to our final destination.  */
4196 	gcc_assert (JUMP_P (last)
4197 		    || (EDGE_SUCC (combo_bb, 0)->dest
4198 			== EXIT_BLOCK_PTR_FOR_FN (cfun)
4199 			&& CALL_P (last)
4200 			&& SIBLING_CALL_P (last))
4201 		    || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
4202 			&& can_throw_internal (last)));
4205   /* The JOIN block may have had quite a number of other predecessors too.
4206      Since we've already merged the TEST, THEN and ELSE blocks, we should
4207      have only one remaining edge from our if-then-else diamond.  If there
4208      is more than one remaining edge, it must come from elsewhere.  There
4209      may be zero incoming edges if the THEN block didn't actually join
4210      back up (as with a call to a non-return function).  */
4211   else if (EDGE_COUNT (join_bb->preds) < 2
4212 	   && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4214       /* We can merge the JOIN cleanly and update the dataflow try
4215 	 again on this pass.*/
4216       merge_blocks (combo_bb, join_bb);
4217       num_true_changes++;
4219   else
4221       /* We cannot merge the JOIN.  */
4223       /* The outgoing edge for the current COMBO block should already
4224 	 be correct.  Verify this.  */
4225       gcc_assert (single_succ_p (combo_bb)
4226 		  && single_succ (combo_bb) == join_bb);
4228       /* Remove the jump and cruft from the end of the COMBO block.  */
4229       if (join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4230 	tidy_fallthru_edge (single_succ_edge (combo_bb));
4233   num_updated_if_blocks++;
4236 /* Find a block ending in a simple IF condition and try to transform it
4237    in some way.  When converting a multi-block condition, put the new code
4238    in the first such block and delete the rest.  Return a pointer to this
4239    first block if some transformation was done.  Return NULL otherwise.  */
4241 static basic_block
4242 find_if_header (basic_block test_bb, int pass)
4244   ce_if_block ce_info;
4245   edge then_edge;
4246   edge else_edge;
4248   /* The kind of block we're looking for has exactly two successors.  */
4249   if (EDGE_COUNT (test_bb->succs) != 2)
4250     return NULL;
4252   then_edge = EDGE_SUCC (test_bb, 0);
4253   else_edge = EDGE_SUCC (test_bb, 1);
4255   if (df_get_bb_dirty (then_edge->dest))
4256     return NULL;
4257   if (df_get_bb_dirty (else_edge->dest))
4258     return NULL;
4260   /* Neither edge should be abnormal.  */
4261   if ((then_edge->flags & EDGE_COMPLEX)
4262       || (else_edge->flags & EDGE_COMPLEX))
4263     return NULL;
4265   /* Nor exit the loop.  */
4266   if ((then_edge->flags & EDGE_LOOP_EXIT)
4267       || (else_edge->flags & EDGE_LOOP_EXIT))
4268     return NULL;
4270   /* The THEN edge is canonically the one that falls through.  */
4271   if (then_edge->flags & EDGE_FALLTHRU)
4273   else if (else_edge->flags & EDGE_FALLTHRU)
4274     std::swap (then_edge, else_edge);
4275   else
4276     /* Otherwise this must be a multiway branch of some sort.  */
4277     return NULL;
4279   memset (&ce_info, 0, sizeof (ce_info));
4280   ce_info.test_bb = test_bb;
4281   ce_info.then_bb = then_edge->dest;
4282   ce_info.else_bb = else_edge->dest;
4283   ce_info.pass = pass;
4285 #ifdef IFCVT_MACHDEP_INIT
4286   IFCVT_MACHDEP_INIT (&ce_info);
4287 #endif
  /* Try each conversion strategy in turn; the first that succeeds wins.  */
4289   if (!reload_completed
4290       && noce_find_if_block (test_bb, then_edge, else_edge, pass))
4291     goto success;
4293   if (reload_completed
4294       && targetm.have_conditional_execution ()
4295       && cond_exec_find_if_block (&ce_info))
4296     goto success;
4298   if (targetm.have_trap ()
4299       && optab_handler (ctrap_optab, word_mode) != CODE_FOR_nothing
4300       && find_cond_trap (test_bb, then_edge, else_edge))
4301     goto success;
4303   if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
4304       && (reload_completed || !targetm.have_conditional_execution ()))
4306       if (find_if_case_1 (test_bb, then_edge, else_edge))
4307 	goto success;
4308       if (find_if_case_2 (test_bb, then_edge, else_edge))
4309 	goto success;
4312   return NULL;
4314  success:
4315   if (dump_file)
4316     fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
4317   /* Set this so we continue looking.  */
4318   cond_exec_changed_p = TRUE;
4319   return ce_info.test_bb;
4322 /* Return true if a block has two edges, one of which falls through to the next
4323    block, and the other jumps to a specific block, so that we can tell if the
4324    block is part of an && test or an || test.  Returns either -1 or the number
4325    of non-note, non-jump, non-USE/CLOBBER insns in the block.  */
4327 static int
4328 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
4330   edge cur_edge;
4331   int fallthru_p = FALSE;
4332   int jump_p = FALSE;
4333   rtx_insn *insn;
4334   rtx_insn *end;
4335   int n_insns = 0;
4336   edge_iterator ei;
4338   if (!cur_bb || !target_bb)
4339     return -1;
4341   /* If no edges, obviously it doesn't jump or fallthru.  */
4342   if (EDGE_COUNT (cur_bb->succs) == 0)
4343     return FALSE;
4345   FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
4347       if (cur_edge->flags & EDGE_COMPLEX)
4348 	/* Anything complex isn't what we want.  */
4349 	return -1;
4351       else if (cur_edge->flags & EDGE_FALLTHRU)
4352 	fallthru_p = TRUE;
4354       else if (cur_edge->dest == target_bb)
4355 	jump_p = TRUE;
4357       else
4358 	return -1;
  /* Require both a fallthru edge and a jump to TARGET_BB.  JUMP_P and
     FALLTHRU_P are 0/1, so the bitwise AND acts as a logical AND.  */
4361   if ((jump_p & fallthru_p) == 0)
4362     return -1;
4364   /* Don't allow calls in the block, since this is used to group && and ||
4365      together for conditional execution support.  ??? we should support
4366      conditional execution support across calls for IA-64 some day, but
4367      for now it makes the code simpler.  */
4368   end = BB_END (cur_bb);
4369   insn = BB_HEAD (cur_bb);
4371   while (insn != NULL_RTX)
4373       if (CALL_P (insn))
4374 	return -1;
4376       if (INSN_P (insn)
4377 	  && !JUMP_P (insn)
4378 	  && !DEBUG_INSN_P (insn)
4379 	  && GET_CODE (PATTERN (insn)) != USE
4380 	  && GET_CODE (PATTERN (insn)) != CLOBBER)
4381 	n_insns++;
4383       if (insn == end)
4384 	break;
4386       insn = NEXT_INSN (insn);
4389   return n_insns;
4392 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
4393 block. If so, we'll try to convert the insns to not require the branch.
4394 Return TRUE if we were successful at converting the block. */
4396 static int
4397 cond_exec_find_if_block (struct ce_if_block * ce_info)
4399 basic_block test_bb = ce_info->test_bb;
4400 basic_block then_bb = ce_info->then_bb;
4401 basic_block else_bb = ce_info->else_bb;
4402 basic_block join_bb = NULL_BLOCK;
4403 edge cur_edge;
4404 basic_block next;
4405 edge_iterator ei;
4407 ce_info->last_test_bb = test_bb;
4409 /* We only ever should get here after reload,
4410 and if we have conditional execution. */
4411 gcc_assert (reload_completed && targetm.have_conditional_execution ());
4413 /* Discover if any fall through predecessors of the current test basic block
4414 were && tests (which jump to the else block) or || tests (which jump to
4415 the then block). */
4416 if (single_pred_p (test_bb)
4417 && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
4419 basic_block bb = single_pred (test_bb);
4420 basic_block target_bb;
4421 int max_insns = MAX_CONDITIONAL_EXECUTE;
4422 int n_insns;
4424 /* Determine if the preceding block is an && or || block. */
4425 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
4427 ce_info->and_and_p = TRUE;
4428 target_bb = else_bb;
4430 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
4432 ce_info->and_and_p = FALSE;
4433 target_bb = then_bb;
4435 else
4436 target_bb = NULL_BLOCK;
4438 if (target_bb && n_insns <= max_insns)
4440 int total_insns = 0;
4441 int blocks = 0;
4443 ce_info->last_test_bb = test_bb;
4445 /* Found at least one && or || block, look for more. */
4448 ce_info->test_bb = test_bb = bb;
4449 total_insns += n_insns;
4450 blocks++;
4452 if (!single_pred_p (bb))
4453 break;
4455 bb = single_pred (bb);
4456 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
4458 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
4460 ce_info->num_multiple_test_blocks = blocks;
4461 ce_info->num_multiple_test_insns = total_insns;
4463 if (ce_info->and_and_p)
4464 ce_info->num_and_and_blocks = blocks;
4465 else
4466 ce_info->num_or_or_blocks = blocks;
4470 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
4471 other than any || blocks which jump to the THEN block. */
4472 if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
4473 return FALSE;
4475 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
4476 FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
4478 if (cur_edge->flags & EDGE_COMPLEX)
4479 return FALSE;
4482 FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
4484 if (cur_edge->flags & EDGE_COMPLEX)
4485 return FALSE;
4488 /* The THEN block of an IF-THEN combo must have zero or one successors. */
4489 if (EDGE_COUNT (then_bb->succs) > 0
4490 && (!single_succ_p (then_bb)
4491 || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
4492 || (epilogue_completed
4493 && tablejump_p (BB_END (then_bb), NULL, NULL))))
4494 return FALSE;
4496 /* If the THEN block has no successors, conditional execution can still
4497 make a conditional call. Don't do this unless the ELSE block has
4498 only one incoming edge -- the CFG manipulation is too ugly otherwise.
4499 Check for the last insn of the THEN block being an indirect jump, which
4500 is listed as not having any successors, but confuses the rest of the CE
4501 code processing. ??? we should fix this in the future. */
4502 if (EDGE_COUNT (then_bb->succs) == 0)
4504 if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4506 rtx_insn *last_insn = BB_END (then_bb);
4508 while (last_insn
4509 && NOTE_P (last_insn)
4510 && last_insn != BB_HEAD (then_bb))
4511 last_insn = PREV_INSN (last_insn);
4513 if (last_insn
4514 && JUMP_P (last_insn)
4515 && ! simplejump_p (last_insn))
4516 return FALSE;
4518 join_bb = else_bb;
4519 else_bb = NULL_BLOCK;
4521 else
4522 return FALSE;
4525 /* If the THEN block's successor is the other edge out of the TEST block,
4526 then we have an IF-THEN combo without an ELSE. */
4527 else if (single_succ (then_bb) == else_bb)
4529 join_bb = else_bb;
4530 else_bb = NULL_BLOCK;
4533 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
4534 has exactly one predecessor and one successor, and the outgoing edge
4535 is not complex, then we have an IF-THEN-ELSE combo. */
4536 else if (single_succ_p (else_bb)
4537 && single_succ (then_bb) == single_succ (else_bb)
4538 && single_pred_p (else_bb)
4539 && !(single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
4540 && !(epilogue_completed
4541 && tablejump_p (BB_END (else_bb), NULL, NULL)))
4542 join_bb = single_succ (else_bb);
4544 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
4545 else
4546 return FALSE;
4548 num_possible_if_blocks++;
4550 if (dump_file)
4552 fprintf (dump_file,
4553 "\nIF-THEN%s block found, pass %d, start block %d "
4554 "[insn %d], then %d [%d]",
4555 (else_bb) ? "-ELSE" : "",
4556 ce_info->pass,
4557 test_bb->index,
4558 BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
4559 then_bb->index,
4560 BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
4562 if (else_bb)
4563 fprintf (dump_file, ", else %d [%d]",
4564 else_bb->index,
4565 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
4567 fprintf (dump_file, ", join %d [%d]",
4568 join_bb->index,
4569 BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
4571 if (ce_info->num_multiple_test_blocks > 0)
4572 fprintf (dump_file, ", %d %s block%s last test %d [%d]",
4573 ce_info->num_multiple_test_blocks,
4574 (ce_info->and_and_p) ? "&&" : "||",
4575 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
4576 ce_info->last_test_bb->index,
4577 ((BB_HEAD (ce_info->last_test_bb))
4578 ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
4579 : -1));
4581 fputc ('\n', dump_file);
4584 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we get the
4585 first condition for free, since we've already asserted that there's a
4586 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
4587 we checked the FALLTHRU flag, those are already adjacent to the last IF
4588 block. */
4589 /* ??? As an enhancement, move the ELSE block. Have to deal with
4590 BLOCK notes, if by no other means than backing out the merge if they
4591 exist. Sticky enough I don't want to think about it now. */
4592 next = then_bb;
4593 if (else_bb && (next = next->next_bb) != else_bb)
4594 return FALSE;
4595 if ((next = next->next_bb) != join_bb
4596 && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4598 if (else_bb)
4599 join_bb = NULL;
4600 else
4601 return FALSE;
4604 /* Do the real work. */
4606 ce_info->else_bb = else_bb;
4607 ce_info->join_bb = join_bb;
4609 /* If we have && and || tests, try to first handle combining the && and ||
4610 tests into the conditional code, and if that fails, go back and handle
4611 it without the && and ||, which at present handles the && case if there
4612 was no ELSE block. */
4613 if (cond_exec_process_if_block (ce_info, TRUE))
4614 return TRUE;
4616 if (ce_info->num_multiple_test_blocks)
4618 cancel_changes (0);
4620 if (cond_exec_process_if_block (ce_info, FALSE))
4621 return TRUE;
4624 return FALSE;
/* Convert a branch over a trap, or a branch
   to a trap, into a conditional trap.

   TEST_BB ends in a conditional jump whose two outgoing edges are
   THEN_EDGE and ELSE_EDGE.  Exactly one of the destination blocks must
   contain nothing but an unconditional trap (see block_has_only_trap).
   On success the trap becomes a conditional trap emitted before the
   comparison, the trap block is removed, and TRUE is returned;
   otherwise FALSE is returned and nothing is changed.  */

static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;
  rtx_insn *trap, *jump;
  rtx cond;
  rtx_insn *cond_earliest;
  enum rtx_code code;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (dump_file)
    {
      fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
	       test_bb->index, trap_bb->index);
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &cond_earliest, false);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we cannot do if-conversion on this block.  Give up for returnjump_p,
     changing a conditional return followed by unconditional trap for
     conditional trap followed by unconditional return is likely not
     beneficial and harder to handle.  */
  if (! onlyjump_p (jump) || returnjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  If reversal is not
     possible (e.g. for some FP comparisons), give up.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
	return FALSE;
    }

  /* Attempt to generate the conditional trap.  The operands are copied
     because the insn sequence is emitted at a different location than
     the originals.  */
  rtx_insn *seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
				 copy_rtx (XEXP (cond, 1)),
				 TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATION (trap));

  /* Delete the trap block if possible.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  df_set_bb_dirty (else_bb);

  if (EDGE_COUNT (trap_bb->preds) == 0)
    {
      delete_basic_block (trap_bb);
      num_true_changes++;
    }

  /* Wire together the blocks again.  In cfglayout mode the fallthru
     flag on the remaining edge suffices; otherwise, when the deleted
     trap block was the branch target, replace the conditional jump
     with an unconditional jump to its old label.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
  else if (trap_bb == then_bb)
    {
      rtx lab = JUMP_LABEL (jump);
      rtx_insn *seq = targetm.gen_jump (lab);
      rtx_jump_insn *newjump = emit_jump_insn_after (seq, jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);
    }
  delete_insn (jump);

  if (can_merge_blocks_p (test_bb, other_bb))
    {
      merge_blocks (test_bb, other_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}
4730 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
4731 return it. */
4733 static rtx_insn *
4734 block_has_only_trap (basic_block bb)
4736 rtx_insn *trap;
4738 /* We're not the exit block. */
4739 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4740 return NULL;
4742 /* The block must have no successors. */
4743 if (EDGE_COUNT (bb->succs) > 0)
4744 return NULL;
4746 /* The only instruction in the THEN block must be the trap. */
4747 trap = first_active_insn (bb);
4748 if (! (trap == BB_END (bb)
4749 && GET_CODE (PATTERN (trap)) == TRAP_IF
4750 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
4751 return NULL;
4753 return trap;
4756 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
4757 transformable, but not necessarily the other. There need be no
4758 JOIN block.
4760 Return TRUE if we were successful at converting the block.
4762 Cases we'd like to look at:
4765 if (test) goto over; // x not live
4766 x = a;
4767 goto label;
4768 over:
4770 becomes
4772 x = a;
4773 if (! test) goto label;
4776 if (test) goto E; // x not live
4777 x = big();
4778 goto L;
4780 x = b;
4781 goto M;
4783 becomes
4785 x = b;
4786 if (test) goto M;
4787 x = big();
4788 goto L;
4790 (3) // This one's really only interesting for targets that can do
4791 // multiway branching, e.g. IA-64 BBB bundles. For other targets
4792 // it results in multiple branches on a cache line, which often
4793 // does not sit well with predictors.
4795 if (test1) goto E; // predicted not taken
4796 x = a;
4797 if (test2) goto F;
4800 x = b;
4803 becomes
4805 x = a;
4806 if (test1) goto E;
4807 if (test2) goto F;
4809 Notes:
4811 (A) Don't do (2) if the branch is predicted against the block we're
4812 eliminating. Do it anyway if we can eliminate a branch; this requires
4813 that the sole successor of the eliminated block postdominate the other
4814 side of the if.
4816 (B) With CE, on (3) we can steal from both sides of the if, creating
4818 if (test1) x = a;
4819 if (!test1) x = b;
4820 if (test1) goto J;
4821 if (test2) goto F;
4825 Again, this is most useful if J postdominates.
4827 (C) CE substitutes for helpful life information.
4829 (D) These heuristics need a lot of work. */
/* Tests for case 1 above.

   TEST_BB ends in a conditional branch; THEN_EDGE and ELSE_EDGE are its
   two outgoing edges.  Try to speculatively move the contents of
   THEN_EDGE->dest above the branch (via dead_or_predicable) and delete
   that block, redirecting TEST_BB's fallthru to the ELSE block.
   Returns TRUE on success.  */

static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block new_bb;
  int then_bb_index, then_prob;
  rtx else_target = NULL_RTX;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && JUMP_P (BB_END (then_bb))
       && CROSSING_JUMP_P (BB_END (then_bb)))
      || (BB_END (test_bb)
	  && JUMP_P (BB_END (test_bb))
	  && CROSSING_JUMP_P (BB_END (test_bb)))
      || (BB_END (else_bb)
	  && JUMP_P (BB_END (else_bb))
	  && CROSSING_JUMP_P (BB_END (else_bb))))
    return FALSE;

  /* THEN has one successor.  */
  if (!single_succ_p (then_bb))
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (!single_pred_p (then_bb))
    return FALSE;

  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-1 found, start %d, then %d\n",
	     test_bb->index, then_bb->index);

  /* Probability that THEN is *not* entered; fall back to 50% when the
     edge carries no recorded probability.  */
  if (then_edge->probability)
    then_prob = REG_BR_PROB_BASE - then_edge->probability;
  else
    then_prob = REG_BR_PROB_BASE / 2;

  /* We're speculating from the THEN path, we want to make sure the cost
     of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (then_bb, then_prob,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
				    predictable_edge_p (then_edge)))))
    return FALSE;

  /* If ELSE is the exit block, remember the label the jump to it uses
     so force_nonfallthru_and_redirect below can reuse it.  */
  if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      rtx_insn *jump = BB_END (else_edge->src);
      gcc_assert (JUMP_P (jump));
      else_target = JUMP_LABEL (jump);
    }

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
			    single_succ_edge (then_bb), 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  /* We can avoid creating a new basic block if then_bb is immediately
     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
     through to else_bb.  */

  if (then_bb->next_bb == else_bb
      && then_bb->prev_bb == test_bb
      && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
      new_bb = 0;
    }
  else if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    new_bb = force_nonfallthru_and_redirect (FALLTHRU_EDGE (test_bb),
					     else_bb, else_target);
  else
    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
					     else_bb);

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (else_bb);

  then_bb_index = then_bb->index;
  delete_basic_block (then_bb);

  /* Make rest of code believe that the newly created block is the THEN_BB
     block we removed.  */
  if (new_bb)
    {
      df_bb_replace (then_bb_index, new_bb);
      /* This should have been done above via force_nonfallthru_and_redirect
	 (possibly called from redirect_edge_and_branch_force).  */
      gcc_checking_assert (BB_PARTITION (new_bb) == BB_PARTITION (test_bb));
    }

  num_true_changes++;
  num_updated_if_blocks++;
  return TRUE;
}
/* Test for case 2 above.

   TEST_BB ends in a conditional branch; THEN_EDGE and ELSE_EDGE are its
   two outgoing edges.  Try to speculatively move the contents of
   ELSE_EDGE->dest above the branch (via dead_or_predicable) and delete
   that block.  Returns TRUE on success.  */

static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ;
  int then_prob, else_prob;

  /* We do not want to speculate (empty) loop latches.  */
  if (current_loops
      && else_bb->loop_father->latch == else_bb)
    return FALSE;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && JUMP_P (BB_END (then_bb))
       && CROSSING_JUMP_P (BB_END (then_bb)))
      || (BB_END (test_bb)
	  && JUMP_P (BB_END (test_bb))
	  && CROSSING_JUMP_P (BB_END (test_bb)))
      || (BB_END (else_bb)
	  && JUMP_P (BB_END (else_bb))
	  && CROSSING_JUMP_P (BB_END (else_bb))))
    return FALSE;

  /* ELSE has one successor.  */
  if (!single_succ_p (else_bb))
    return FALSE;
  else
    else_succ = single_succ_edge (else_bb);

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (!single_pred_p (else_bb))
    return FALSE;

  /* THEN is not EXIT.  */
  if (then_bb->index < NUM_FIXED_BLOCKS)
    return FALSE;

  /* Derive branch probabilities; default to 50/50 when the edge has
     no recorded probability.  */
  if (else_edge->probability)
    {
      else_prob = else_edge->probability;
      then_prob = REG_BR_PROB_BASE - else_prob;
    }
  else
    {
      else_prob = REG_BR_PROB_BASE / 2;
      then_prob = REG_BR_PROB_BASE / 2;
    }

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
  if (else_prob > then_prob)
    ;
  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
			      else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-2 found, start %d, else %d\n",
	     test_bb->index, else_bb->index);

  /* We're speculating from the ELSE path, we want to make sure the cost
     of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (else_bb, else_prob,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
				    predictable_edge_p (else_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  OTHER_BB is the
   other destination of the branch; its live-in set constrains which
   registers the moved code may clobber.  DEST_EDGE is an
   edge representing a jump to the join block; after the conversion,
   TEST_BB should be branching to its destination.
   REVERSEP is true if the sense of the branch should be reversed.

   Two strategies are tried: conditional execution (predicate every
   insn in MERGE_BB) when the target supports it, otherwise plain code
   motion of MERGE_BB's insns above the branch, which requires that
   the moved code be free of side effects visible on the other path.  */

static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
		    basic_block other_bb, edge dest_edge, int reversep)
{
  basic_block new_dest = dest_edge->dest;
  rtx_insn *head, *end, *jump;
  rtx_insn *earliest = NULL;
  rtx old_dest;
  bitmap merge_set = NULL;
  /* Number of pending changes.  */
  int n_validated_changes = 0;
  rtx new_dest_label = NULL_RTX;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  while (DEBUG_INSN_P (end) && end != head)
    end = PREV_INSN (end);

  /* If merge_bb ends with a tablejump, predicating/moving insn's
     into test_bb and then deleting merge_bb will result in the jumptable
     that follows merge_bb being removed along with merge_bb and then we
     get an unresolved reference to the jumptable.  */
  if (tablejump_p (end, NULL, NULL))
    return FALSE;

  /* Skip over the leading label, debug insns, and a leading note; if
     nothing remains the block has no body to move or predicate.  */
  if (LABEL_P (head))
    head = NEXT_INSN (head);
  while (DEBUG_INSN_P (head) && head != end)
    head = NEXT_INSN (head);
  if (NOTE_P (head))
    {
      if (head == end)
	{
	  head = end = NULL;
	  goto no_body;
	}
      head = NEXT_INSN (head);
      while (DEBUG_INSN_P (head) && head != end)
	head = NEXT_INSN (head);
    }

  /* A trailing simple jump is not part of the body; exclude it.  */
  if (JUMP_P (end))
    {
      if (!onlyjump_p (end))
	return FALSE;
      if (head == end)
	{
	  head = end = NULL;
	  goto no_body;
	}
      end = PREV_INSN (end);
      while (DEBUG_INSN_P (end) && end != head)
	end = PREV_INSN (end);
    }

  /* Don't move frame-related insn across the conditional branch.  This
     can lead to one of the paths of the branch having wrong unwind info.  */
  if (epilogue_completed)
    {
      rtx_insn *insn = head;
      while (1)
	{
	  if (INSN_P (insn) && RTX_FRAME_RELATED_P (insn))
	    return FALSE;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (targetm.have_conditional_execution ())
    {
      /* In the conditional execution case, we have things easy.  We know
	 the condition is reversible.  We don't have to check life info
	 because we're going to conditionally execute the code anyway.
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

      rtx cond;

      cond = cond_exec_get_condition (jump);
      if (! cond)
	return FALSE;

      rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      int prob_val = (note ? XINT (note, 0) : -1);

      if (reversep)
	{
	  enum rtx_code rev = reversed_comparison_code (cond, jump);
	  if (rev == UNKNOWN)
	    return FALSE;
	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
				 XEXP (cond, 1));
	  if (prob_val >= 0)
	    prob_val = REG_BR_PROB_BASE - prob_val;
	}

      /* The changes are queued in the change group; they are confirmed
	 or cancelled as a unit after the jump is redirected below.  */
      if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
	  && verify_changes (0))
	n_validated_changes = num_validated_changes ();
      else
	cancel_changes (0);

      earliest = jump;
    }
#endif

  /* If we allocated new pseudos (e.g. in the conditional move
     expander called from noce_emit_cmove), we must resize the
     array first.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Try the NCE path if the CE path did not result in any changes.  */
  if (n_validated_changes == 0)
    {
      rtx cond;
      rtx_insn *insn;
      regset live;
      bool success;

      /* In the non-conditional execution case, we have to verify that there
	 are no trapping operations, no calls, no references to memory, and
	 that any registers modified are dead at the branch site.  */

      if (!any_condjump_p (jump))
	return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest, false);
      if (!cond)
	return FALSE;

      live = BITMAP_ALLOC (&reg_obstack);
      simulate_backwards_to_point (merge_bb, live, end);
      success = can_move_insns_across (head, end, earliest, jump,
				       merge_bb, live,
				       df_get_live_in (other_bb), NULL);
      BITMAP_FREE (live);
      if (!success)
	return FALSE;

      /* Collect the set of registers set in MERGE_BB.  */
      merge_set = BITMAP_ALLOC (&reg_obstack);

      FOR_BB_INSNS (merge_bb, insn)
	if (NONDEBUG_INSN_P (insn))
	  df_simulate_find_defs (insn, merge_set);

      /* If shrink-wrapping, disable this optimization when test_bb is
	 the first basic block and merge_bb exits.  The idea is to not
	 move code setting up a return register as that may clobber a
	 register used to pass function parameters, which then must be
	 saved in caller-saved regs.  A caller-saved reg requires the
	 prologue, killing a shrink-wrap opportunity.  */
      if ((SHRINK_WRAPPING_ENABLED && !epilogue_completed)
	  && ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == test_bb
	  && single_succ_p (new_dest)
	  && single_succ (new_dest) == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && bitmap_intersect_p (df_get_live_in (new_dest), merge_set))
	{
	  regset return_regs;
	  unsigned int i;

	  return_regs = BITMAP_ALLOC (&reg_obstack);

	  /* Start off with the intersection of regs used to pass
	     params and regs used to return values.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (FUNCTION_ARG_REGNO_P (i)
		&& targetm.calls.function_value_regno_p (i))
	      bitmap_set_bit (return_regs, INCOMING_REGNO (i));

	  bitmap_and_into (return_regs,
			   df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  bitmap_and_into (return_regs,
			   df_get_live_in (EXIT_BLOCK_PTR_FOR_FN (cfun)));
	  if (!bitmap_empty_p (return_regs))
	    {
	      FOR_BB_INSNS_REVERSE (new_dest, insn)
		if (NONDEBUG_INSN_P (insn))
		  {
		    df_ref def;

		    /* If this insn sets any reg in return_regs, add all
		       reg uses to the set of regs we're interested in.  */
		    FOR_EACH_INSN_DEF (def, insn)
		      if (bitmap_bit_p (return_regs, DF_REF_REGNO (def)))
			{
			  df_simulate_uses (insn, return_regs);
			  break;
			}
		  }
	      if (bitmap_intersect_p (merge_set, return_regs))
		{
		  BITMAP_FREE (return_regs);
		  BITMAP_FREE (merge_set);
		  return FALSE;
		}
	    }
	  BITMAP_FREE (return_regs);
	}
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      if (!any_condjump_p (jump))
	goto cancel;

      if (JUMP_P (BB_END (dest_edge->src)))
	new_dest_label = JUMP_LABEL (BB_END (dest_edge->src));
      else if (new_dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	new_dest_label = ret_rtx;
      else
	new_dest_label = block_label (new_dest);

      rtx_jump_insn *jump_insn = as_a <rtx_jump_insn *> (jump);
      if (reversep
	  ? ! invert_jump_1 (jump_insn, new_dest_label)
	  : ! redirect_jump_1 (jump_insn, new_dest_label))
	goto cancel;
    }

  /* Commit all queued changes (predication and/or jump redirection)
     as one atomic group.  */
  if (verify_changes (n_validated_changes))
    confirm_change_group ();
  else
    goto cancel;

  if (other_bb != new_dest)
    {
      redirect_jump_2 (as_a <rtx_jump_insn *> (jump), old_dest, new_dest_label,
		       0, reversep);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
	{
	  std::swap (BRANCH_EDGE (test_bb)->count,
		     FALLTHRU_EDGE (test_bb)->count);
	  std::swap (BRANCH_EDGE (test_bb)->probability,
		     FALLTHRU_EDGE (test_bb)->probability);
	  update_br_prob_note (test_bb);
	}
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      rtx_insn *insn;

      if (end == BB_END (merge_bb))
	BB_END (merge_bb) = PREV_INSN (head);

      /* PR 21767: when moving insns above a conditional branch, the REG_EQUAL
	 notes being moved might become invalid.  */
      insn = head;
      do
	{
	  rtx note;

	  if (! INSN_P (insn))
	    continue;
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (! note)
	    continue;
	  remove_note (insn, note);
	}
      while (insn != end && (insn = NEXT_INSN (insn)));

      /* PR46315: when moving insns above a conditional branch, the REG_EQUAL
	 notes referring to the registers being set might become invalid.  */
      if (merge_set)
	{
	  unsigned i;
	  bitmap_iterator bi;

	  EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
	    remove_reg_equal_equiv_notes_for_regno (i);

	  BITMAP_FREE (merge_set);
	}

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
	 At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  /* Roll back every queued change and free the def bitmap.  */
  cancel_changes (0);

  if (merge_set)
    BITMAP_FREE (merge_set);

  return FALSE;
}
/* Main entry point for all if-conversion.  AFTER_COMBINE is true if
   we are after combine pass.  Repeatedly scans every basic block for
   convertible if-then(-else) headers until a full pass makes no
   conditional-execution changes, then dumps statistics.  */

static void
if_convert (bool after_combine)
{
  basic_block bb;
  int pass;

  /* At -O1 the live problem is not otherwise maintained; add it
     temporarily (removed again at the end of this function).  */
  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  /* Record whether we are after combine pass.  */
  ifcvt_after_combine = after_combine;
  have_cbranchcc4 = (direct_optab_handler (cbranch_optab, CCmode)
		     != CODE_FOR_nothing);
  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;

  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  mark_loop_exit_edges ();
  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);

  /* Compute postdominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
      df_analyze ();
      /* Only need to do dce on the first pass.  */
      df_clear_flags (DF_LR_RUN_DCE);
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && pass > 1)
	fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB_FN (bb, cfun)
	{
	  basic_block new_bb;
	  /* Keep converting at this block as long as its df info is
	     still clean; find_if_header returns the block to continue
	     from after a successful conversion.  */
	  while (!df_get_bb_dirty (bb)
		 && (new_bb = find_if_header (bb, pass)) != NULL)
	    bb = new_bb;
	}

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && cond_exec_changed_p)
	print_rtl_with_bb (dump_file, get_insns (), dump_flags);
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  if (dump_file)
    fflush (dump_file);

  clear_aux_for_blocks ();

  /* If we allocated new pseudos, we must resize the array for sched1.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Write the final stats.  */
  if (dump_file && num_possible_if_blocks > 0)
    {
      fprintf (dump_file,
	       "\n%d possible IF blocks searched.\n",
	       num_possible_if_blocks);
      fprintf (dump_file,
	       "%d IF blocks converted.\n",
	       num_updated_if_blocks);
      fprintf (dump_file,
	       "%d true changes made.\n\n\n",
	       num_true_changes);
    }

  if (optimize == 1)
    df_remove_problem (df_live);

  checking_verify_flow_info ();
}
5486 /* If-conversion and CFG cleanup. */
5487 static unsigned int
5488 rest_of_handle_if_conversion (void)
5490 if (flag_if_conversion)
5492 if (dump_file)
5494 dump_reg_info (dump_file);
5495 dump_flow_info (dump_file, dump_flags);
5497 cleanup_cfg (CLEANUP_EXPENSIVE);
5498 if_convert (false);
5501 cleanup_cfg (0);
5502 return 0;
namespace {

/* Pass metadata for the first if-conversion pass ("ce1").  */
const pass_data pass_data_rtl_ifcvt =
{
  RTL_PASS, /* type */
  "ce1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

/* Pass wrapper around rest_of_handle_if_conversion, gated on
   optimization being enabled and the if_conversion debug counter.  */
class pass_rtl_ifcvt : public rtl_opt_pass
{
public:
  pass_rtl_ifcvt (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_ifcvt, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (optimize > 0) && dbg_cnt (if_conversion);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_if_conversion ();
    }

}; // class pass_rtl_ifcvt

} // anon namespace

/* Factory function used by the pass manager to create the "ce1" pass.  */
rtl_opt_pass *
make_pass_rtl_ifcvt (gcc::context *ctxt)
{
  return new pass_rtl_ifcvt (ctxt);
}
/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  */

namespace {

/* Pass metadata for the second if-conversion pass ("ce2").  */
const pass_data pass_data_if_after_combine =
{
  RTL_PASS, /* type */
  "ce2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

/* Pass wrapper that reruns if_convert (in after-combine mode), gated
   on -fif-conversion and the if_after_combine debug counter.  */
class pass_if_after_combine : public rtl_opt_pass
{
public:
  pass_if_after_combine (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_combine, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion
	&& dbg_cnt (if_after_combine);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_combine

} // anon namespace

/* Factory function used by the pass manager to create the "ce2" pass.  */
rtl_opt_pass *
make_pass_if_after_combine (gcc::context *ctxt)
{
  return new pass_if_after_combine (ctxt);
}
namespace {

/* Pass metadata for the third if-conversion pass ("ce3"), which (per
   its name) runs after reload.  */
const pass_data pass_data_if_after_reload =
{
  RTL_PASS, /* type */
  "ce3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

/* Pass wrapper that reruns if_convert (in after-combine mode), gated
   on -fif-conversion2 and the if_after_reload debug counter.  */
class pass_if_after_reload : public rtl_opt_pass
{
public:
  pass_if_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion2
	&& dbg_cnt (if_after_reload);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_reload

} // anon namespace

/* Factory function used by the pass manager to create the "ce3" pass.  */
rtl_opt_pass *
make_pass_if_after_reload (gcc::context *ctxt)
{
  return new pass_if_after_reload (ctxt);
}