/* If-conversion support.
   Copyright (C) 2000-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "cfghooks.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "df.h"
29 #include "regs.h"
30 #include "flags.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "except.h"
34 #include "cfgrtl.h"
35 #include "cfganal.h"
36 #include "cfgcleanup.h"
37 #include "alias.h"
38 #include "expmed.h"
39 #include "dojump.h"
40 #include "explow.h"
41 #include "calls.h"
42 #include "emit-rtl.h"
43 #include "varasm.h"
44 #include "stmt.h"
45 #include "expr.h"
46 #include "output.h"
47 #include "insn-codes.h"
48 #include "optabs.h"
49 #include "diagnostic-core.h"
50 #include "tm_p.h"
51 #include "cfgloop.h"
52 #include "target.h"
53 #include "tree-pass.h"
54 #include "dbgcnt.h"
55 #include "shrink-wrap.h"
56 #include "ifcvt.h"
#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif

#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
   + 1)
#endif
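
/* For example, with the default definition above, a target whose
   BRANCH_COST is 2 when optimizing for speed allows up to 3 insns in a
   block to be conditionally executed; cond_exec_process_if_block below
   doubles this budget when an ELSE block is present.  */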
#ifndef HAVE_cbranchcc4
#define HAVE_cbranchcc4 0
#endif

#define IFCVT_MULTIPLE_DUMPS 1

#define NULL_BLOCK ((basic_block) NULL)

/* True if after combine pass.  */
static bool ifcvt_after_combine;

/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;

/* Forward references.  */
static int count_bb_insns (const_basic_block);
static bool cheap_bb_rtx_cost_p (const_basic_block, int, int);
static rtx_insn *first_active_insn (basic_block);
static rtx_insn *last_active_insn (basic_block, int);
static rtx_insn *find_active_insn_before (basic_block, rtx_insn *);
static rtx_insn *find_active_insn_after (basic_block, rtx_insn *);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block *, rtx_insn *, rtx, rtx, int,
                                    int);
static rtx cond_exec_get_condition (rtx_insn *);
static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
static int noce_operand_ok (const_rtx);
static void merge_if_block (ce_if_block *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
static int block_jumps_and_fallthru_p (basic_block, basic_block);
static int noce_find_if_block (basic_block, edge, edge, int);
static int cond_exec_find_if_block (ce_if_block *);
static int find_if_case_1 (basic_block, edge, edge);
static int find_if_case_2 (basic_block, edge, edge);
static int dead_or_predicable (basic_block, basic_block, basic_block,
                               edge, int);
static void noce_emit_move_insn (rtx, rtx);
static rtx_insn *block_has_only_trap (basic_block);

/* Count the number of non-jump active insns in BB.  */

static int
count_bb_insns (const_basic_block bb)
{
  int count = 0;
  rtx_insn *insn = BB_HEAD (bb);

  while (1)
    {
      if (active_insn_p (insn) && !JUMP_P (insn))
        count++;

      if (insn == BB_END (bb))
        break;
      insn = NEXT_INSN (insn);
    }

  return count;
}

/* Determine whether the total insn_rtx_cost on non-jump insns in
   basic block BB is less than MAX_COST.  This function returns
   false if the cost of any instruction could not be estimated.

   The cost of the non-jump insns in BB is scaled by REG_BR_PROB_BASE,
   as those insns are being speculated.  MAX_COST is scaled with SCALE
   plus a small fudge factor.  */
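
/* As a worked example: with SCALE at half of REG_BR_PROB_BASE (the block
   is predicted to execute half the time) and the speed fudge factor of
   REG_BR_PROB_BASE / 8 added below, MAX_COST is multiplied by 5/8 of
   REG_BR_PROB_BASE, while each insn contributes insn_rtx_cost times
   REG_BR_PROB_BASE to the running count; so the block passes only if
   its total cost stays under 5/8 of MAX_COST.  */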
static bool
cheap_bb_rtx_cost_p (const_basic_block bb, int scale, int max_cost)
{
  int count = 0;
  rtx_insn *insn = BB_HEAD (bb);
  bool speed = optimize_bb_for_speed_p (bb);

  /* Set scale to REG_BR_PROB_BASE to cancel out the identical scaling
     applied to insn_rtx_cost when optimizing for size.  Only do
     this after combine because if-conversion might interfere with
     passes before combine.

     Use optimize_function_for_speed_p instead of the pre-defined
     variable speed to make sure it is set to the same value for all
     basic blocks in one if-conversion transformation.  */
  if (!optimize_function_for_speed_p (cfun) && ifcvt_after_combine)
    scale = REG_BR_PROB_BASE;
  /* Our branch probability/scaling factors are just estimates and don't
     account for cases where we can get speculation for free and other
     secondary benefits.  So we fudge the scale factor to make speculating
     appear a little more profitable when optimizing for performance.  */
  else
    scale += REG_BR_PROB_BASE / 8;

  max_cost *= scale;

  while (1)
    {
      if (NONJUMP_INSN_P (insn))
        {
          int cost = insn_rtx_cost (PATTERN (insn), speed) * REG_BR_PROB_BASE;
          if (cost == 0)
            return false;

          /* If this instruction is the load or set of a "stack" register,
             such as a floating point register on x87, then the cost of
             speculatively executing this insn may need to include
             the additional cost of popping its result off of the
             register stack.  Unfortunately, correctly recognizing and
             accounting for this additional overhead is tricky, so for
             now we simply prohibit such speculative execution.  */
#ifdef STACK_REGS
          {
            rtx set = single_set (insn);
            if (set && STACK_REG_P (SET_DEST (set)))
              return false;
          }
#endif

          count += cost;
          if (count >= max_cost)
            return false;
        }
      else if (CALL_P (insn))
        return false;

      if (insn == BB_END (bb))
        break;
      insn = NEXT_INSN (insn);
    }

  return true;
}
/* Return the first non-jump active insn in the basic block.  */

static rtx_insn *
first_active_insn (basic_block bb)
{
  rtx_insn *insn = BB_HEAD (bb);

  if (LABEL_P (insn))
    {
      if (insn == BB_END (bb))
        return NULL;
      insn = NEXT_INSN (insn);
    }

  while (NOTE_P (insn) || DEBUG_INSN_P (insn))
    {
      if (insn == BB_END (bb))
        return NULL;
      insn = NEXT_INSN (insn);
    }

  if (JUMP_P (insn))
    return NULL;

  return insn;
}
/* Return the last active non-jump insn in the basic block; if SKIP_USE_P,
   also skip trailing USE insns.  */

static rtx_insn *
last_active_insn (basic_block bb, int skip_use_p)
{
  rtx_insn *insn = BB_END (bb);
  rtx_insn *head = BB_HEAD (bb);

  while (NOTE_P (insn)
         || JUMP_P (insn)
         || DEBUG_INSN_P (insn)
         || (skip_use_p
             && NONJUMP_INSN_P (insn)
             && GET_CODE (PATTERN (insn)) == USE))
    {
      if (insn == head)
        return NULL;
      insn = PREV_INSN (insn);
    }

  if (LABEL_P (insn))
    return NULL;

  return insn;
}
/* Return the active insn before INSN inside basic block CURR_BB.  */

static rtx_insn *
find_active_insn_before (basic_block curr_bb, rtx_insn *insn)
{
  if (!insn || insn == BB_HEAD (curr_bb))
    return NULL;

  while ((insn = PREV_INSN (insn)) != NULL_RTX)
    {
      if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
        break;

      /* No other active insn all the way to the start of the basic block.  */
      if (insn == BB_HEAD (curr_bb))
        return NULL;
    }

  return insn;
}

/* Return the active insn after INSN inside basic block CURR_BB.  */

static rtx_insn *
find_active_insn_after (basic_block curr_bb, rtx_insn *insn)
{
  if (!insn || insn == BB_END (curr_bb))
    return NULL;

  while ((insn = NEXT_INSN (insn)) != NULL_RTX)
    {
      if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
        break;

      /* No other active insn all the way to the end of the basic block.  */
      if (insn == BB_END (curr_bb))
        return NULL;
    }

  return insn;
}
/* Return the basic block reached by falling through the basic block BB.  */

static basic_block
block_fallthru (basic_block bb)
{
  edge e = find_fallthru_edge (bb->succs);

  return (e) ? e->dest : NULL_BLOCK;
}
/* Return true if RTXs A and B can be safely interchanged.  */

static bool
rtx_interchangeable_p (const_rtx a, const_rtx b)
{
  if (!rtx_equal_p (a, b))
    return false;

  if (GET_CODE (a) != MEM)
    return true;

  /* A dead type-unsafe memory reference is legal, but a live type-unsafe
     memory reference is not.  Interchanging a dead type-unsafe memory
     reference with a live type-safe one creates a live type-unsafe memory
     reference; in other words, it makes the program illegal.
     We check here conservatively whether the two memory references have
     equal memory attributes.  */

  return mem_attrs_eq_p (get_mem_attrs (a), get_mem_attrs (b));
}
/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.  */
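
/* As a concrete illustration, under a test such as
   (ne (reg:CC 24) (const_int 0)), a simple insn like
   (set (reg:SI 0) (reg:SI 1)) is rewritten as
     (cond_exec (ne (reg:CC 24) (const_int 0))
                (set (reg:SI 0) (reg:SI 1)))
   which is the form that targets with conditional execution (ARM, for
   example) recognize; the register numbers here are only illustrative.  */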
static int
cond_exec_process_insns (ce_if_block *ce_info ATTRIBUTE_UNUSED,
                                          /* if block information */
                         rtx_insn *start, /* first insn to look at */
                         rtx end,         /* last insn to look at */
                         rtx test,        /* conditional execution test */
                         int prob_val,    /* probability of branch taken */
                         int mod_ok)      /* ok for insns to modify the test? */
{
  int must_be_last = FALSE;
  rtx_insn *insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* dwarf2out can't cope with conditional prologues.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
        return FALSE;

      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
        goto insn_done;

      gcc_assert (NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* dwarf2out can't cope with conditional unwind info.  */
      if (RTX_FRAME_RELATED_P (insn))
        return FALSE;

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
        {
          /* ??? Ug.  Actually unlinking the thing is problematic,
             given what we'd have to coordinate with our callers.  */
          SET_INSN_DELETED (insn);
          goto insn_done;
        }

      /* Last insn wasn't last?  */
      if (must_be_last)
        return FALSE;

      if (modified_in_p (test, insn))
        {
          if (!mod_ok)
            return FALSE;
          must_be_last = TRUE;
        }

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
        {
          if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
            return FALSE;

          xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
                               COND_EXEC_TEST (pattern));
          pattern = COND_EXEC_CODE (pattern);
        }

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
        return FALSE;
#endif

      validate_change (insn, &PATTERN (insn), pattern, 1);

      if (CALL_P (insn) && prob_val >= 0)
        validate_change (insn, &REG_NOTES (insn),
                         gen_rtx_INT_LIST ((machine_mode) REG_BR_PROB,
                                           prob_val, REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
        break;
    }

  return TRUE;
}
/* Return the condition for a jump.  Do not do any special processing.  */

static rtx
cond_exec_get_condition (rtx_insn *jump)
{
  rtx test_if, cond;

  if (any_condjump_p (jump))
    test_if = SET_SRC (pc_set (jump));
  else
    return NULL_RTX;
  cond = XEXP (test_if, 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
      && LABEL_REF_LABEL (XEXP (test_if, 2)) == JUMP_LABEL (jump))
    {
      enum rtx_code rev = reversed_comparison_code (cond, jump);
      if (rev == UNKNOWN)
        return NULL_RTX;

      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
                             XEXP (cond, 1));
    }

  return cond;
}
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_exec_process_if_block (ce_if_block *ce_info, /* if block information */
                            int do_multiple_p)    /* true to handle && and ||
                                                     test blocks together */
{
  basic_block test_bb = ce_info->test_bb;  /* last test block */
  basic_block then_bb = ce_info->then_bb;  /* THEN */
  basic_block else_bb = ce_info->else_bb;  /* ELSE or NULL */
  rtx test_expr;                /* expression in IF_THEN_ELSE that is tested */
  rtx_insn *then_start;         /* first insn in THEN block */
  rtx_insn *then_end;           /* last insn + 1 in THEN block */
  rtx_insn *else_start = NULL;  /* first insn in ELSE block or NULL */
  rtx_insn *else_end = NULL;    /* last insn + 1 in ELSE block */
  int max;                      /* max # of insns to convert.  */
  int then_mod_ok;              /* whether conditional mods are ok in THEN */
  rtx true_expr;                /* test for else block insns */
  rtx false_expr;               /* test for then block insns */
  int true_prob_val;            /* probability of else block */
  int false_prob_val;           /* probability of then block */
  rtx_insn *then_last_head = NULL;   /* Last match at the head of THEN */
  rtx_insn *else_last_head = NULL;   /* Last match at the head of ELSE */
  rtx_insn *then_first_tail = NULL;  /* First match at the tail of THEN */
  rtx_insn *else_first_tail = NULL;  /* First match at the tail of ELSE */
  int then_n_insns, else_n_insns, n_insns;
  enum rtx_code false_code;
  rtx note;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
        return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we cannot do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  then_n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  n_insns = then_n_insns;
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      int n_matching;

      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      else_n_insns = ce_info->num_else_insns = count_bb_insns (else_bb);
      n_insns += else_n_insns;

      /* Look for matching sequences at the head and tail of the two blocks,
         and limit the range of insns to be converted if possible.  */
      n_matching = flow_find_cross_jump (then_bb, else_bb,
                                         &then_first_tail, &else_first_tail,
                                         NULL);
      if (then_first_tail == BB_HEAD (then_bb))
        then_start = then_end = NULL;
      if (else_first_tail == BB_HEAD (else_bb))
        else_start = else_end = NULL;

      if (n_matching > 0)
        {
          if (then_end)
            then_end = find_active_insn_before (then_bb, then_first_tail);
          if (else_end)
            else_end = find_active_insn_before (else_bb, else_first_tail);
          n_insns -= 2 * n_matching;
        }

      if (then_start
          && else_start
          && then_n_insns > n_matching
          && else_n_insns > n_matching)
        {
          int longest_match = MIN (then_n_insns - n_matching,
                                   else_n_insns - n_matching);
          n_matching
            = flow_find_head_matching_sequence (then_bb, else_bb,
                                                &then_last_head,
                                                &else_last_head,
                                                longest_match);

          if (n_matching > 0)
            {
              rtx_insn *insn;

              /* We won't pass the insns in the head sequence to
                 cond_exec_process_insns, so we need to test them here
                 to make sure that they don't clobber the condition.  */
              for (insn = BB_HEAD (then_bb);
                   insn != NEXT_INSN (then_last_head);
                   insn = NEXT_INSN (insn))
                if (!LABEL_P (insn) && !NOTE_P (insn)
                    && !DEBUG_INSN_P (insn)
                    && modified_in_p (test_expr, insn))
                  return FALSE;
            }

          if (then_last_head == then_end)
            then_start = then_end = NULL;
          if (else_last_head == else_end)
            else_start = else_end = NULL;

          if (n_matching > 0)
            {
              if (then_start)
                then_start = find_active_insn_after (then_bb, then_last_head);
              if (else_start)
                else_start = find_active_insn_after (else_bb, else_last_head);
              n_insns -= 2 * n_matching;
            }
        }
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
                                 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note)
    {
      true_prob_val = XINT (note, 0);
      false_prob_val = REG_BR_PROB_BASE - true_prob_val;
    }
  else
    {
      true_prob_val = -1;
      false_prob_val = -1;
    }

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
        goto fail;

      do
        {
          rtx_insn *start, *end;
          rtx t, f;
          enum rtx_code f_code;

          bb = block_fallthru (bb);
          start = first_active_insn (bb);
          end = last_active_insn (bb, TRUE);
          if (start
              && ! cond_exec_process_insns (ce_info, start, end, false_expr,
                                            false_prob_val, FALSE))
            goto fail;

          /* If the conditional jump is more than just a conditional jump, then
             we cannot do conditional execution conversion on this block.  */
          if (! onlyjump_p (BB_END (bb)))
            goto fail;

          /* Find the conditional jump and isolate the test.  */
          t = cond_exec_get_condition (BB_END (bb));
          if (! t)
            goto fail;

          f_code = reversed_comparison_code (t, BB_END (bb));
          if (f_code == UNKNOWN)
            goto fail;

          f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
          if (ce_info->and_and_p)
            {
              t = gen_rtx_AND (GET_MODE (t), true_expr, t);
              f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
            }
          else
            {
              t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
              f = gen_rtx_AND (GET_MODE (t), false_expr, f);
            }

          /* If the machine description needs to modify the tests, such as
             setting a conditional execution register from a comparison, it can
             do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
          IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

          /* See if the conversion failed.  */
          if (!t || !f)
            goto fail;
#endif

          true_expr = t;
          false_expr = f;
        }
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
          || ! cond_exec_process_insns (ce_info, then_start, then_end,
                                        false_expr, false_prob_val,
                                        then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
                                    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
             n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  If we had matching sequences, make sure to delete one
     copy at the appropriate location first: delete the copy in the THEN branch
     for a tail sequence so that the remaining one is executed last for both
     branches, and delete the copy in the ELSE branch for a head sequence so
     that the remaining one is executed first for both branches.  */
  if (then_first_tail)
    {
      rtx_insn *from = then_first_tail;
      if (!INSN_P (from))
        from = find_active_insn_after (then_bb, from);
      delete_insn_chain (from, BB_END (then_bb), false);
    }
  if (else_last_head)
    delete_insn_chain (first_active_insn (else_bb), else_last_head, false);

  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block.  */
  basic_block test_bb, then_bb, else_bb, join_bb;

  /* The jump that ends TEST_BB.  */
  rtx_insn *jump;

  /* The jump condition.  */
  rtx cond;

  /* New insns should be inserted before this one.  */
  rtx_insn *cond_earliest;

  /* Insns in the THEN and ELSE block.  There is always just this
     one insn in those blocks.  The insns are single_set insns.
     If there was no ELSE block, INSN_B is the last insn before
     COND_EARLIEST, or NULL_RTX.  In the former case, the insn
     operands are still valid, as if INSN_B was moved down below
     the jump.  */
  rtx_insn *insn_a, *insn_b;

  /* The SET_SRC of INSN_A and INSN_B.  */
  rtx a, b;

  /* The SET_DEST of INSN_A.  */
  rtx x;

  /* True if this if block is not canonical.  In the canonical form of
     if blocks, the THEN_BB is the block reached via the fallthru edge
     from TEST_BB.  For the noce transformations, we allow the symmetric
     form as well.  */
  bool then_else_reversed;

  /* Estimated cost of the particular branch instruction.  */
  int branch_cost;
};
static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
static int noce_try_move (struct noce_if_info *);
static int noce_try_store_flag (struct noce_if_info *);
static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
                            rtx, rtx, rtx);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx_insn **);
static int noce_try_minmax (struct noce_if_info *);
static int noce_try_abs (struct noce_if_info *);
static int noce_try_sign_mask (struct noce_if_info *);
/* Helper function for noce_try_store_flag*.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
                      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
                  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    {
      rtx set = pc_set (if_info->jump);
      cond = XEXP (SET_SRC (set), 0);
      if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
          && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2))
             == JUMP_LABEL (if_info->jump))
        reversep = !reversep;
      if (if_info->then_else_reversed)
        reversep = !reversep;
    }

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx src = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
                                XEXP (cond, 1));
      rtx set = gen_rtx_SET (x, src);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      if (recog_memoized (insn) >= 0)
        {
          rtx_insn *seq = get_insns ();
          end_sequence ();
          emit_insn (seq);

          if_info->cond_earliest = if_info->jump;

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  return emit_store_flag (x, code, XEXP (cond, 0),
                          XEXP (cond, 1), VOIDmode,
                          (code == LTU || code == LEU
                           || code == GEU || code == GTU), normalize);
}
/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  machine_mode outmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      rtx_insn *seq, *insn;
      rtx target;
      optab ot;

      start_sequence ();
      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
         otherwise construct a suitable SET pattern ourselves.  */
      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
             ? emit_move_insn (x, y)
             : emit_insn (gen_rtx_SET (x, y));
      seq = get_insns ();
      end_sequence ();

      if (recog_memoized (insn) <= 0)
        {
          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              rtx op = XEXP (x, 0);
              unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
              unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));

              /* store_bit_field expects START to be relative to
                 BYTES_BIG_ENDIAN and adjusts this value for machines with
                 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
                 invoke store_bit_field again it is necessary to have the START
                 value from the first call.  */
              if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
                {
                  if (MEM_P (op))
                    start = BITS_PER_UNIT - start - size;
                  else
                    {
                      gcc_assert (REG_P (op));
                      start = BITS_PER_WORD - start - size;
                    }
                }

              gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
              store_bit_field (op, size, start, 0, 0, GET_MODE (x), y);
              return;
            }

          switch (GET_RTX_CLASS (GET_CODE (y)))
            {
            case RTX_UNARY:
              ot = code_to_optab (GET_CODE (y));
              if (ot)
                {
                  start_sequence ();
                  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                        emit_move_insn (x, target);
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            case RTX_BIN_ARITH:
            case RTX_COMM_ARITH:
              ot = code_to_optab (GET_CODE (y));
              if (ot)
                {
                  start_sequence ();
                  target = expand_binop (GET_MODE (y), ot,
                                         XEXP (y, 0), XEXP (y, 1),
                                         x, 0, OPTAB_DIRECT);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                        emit_move_insn (x, target);
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            default:
              break;
            }
        }

      emit_insn (seq);
      return;
    }

  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos,
                   0, 0, outmode, y);
}
/* Return the CC reg if it is used in COND.  */

static rtx
cc_in_cond (rtx cond)
{
  if (HAVE_cbranchcc4 && cond
      && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_CC)
    return XEXP (cond, 0);

  return NULL_RTX;
}
/* Return the sequence of instructions generated by if conversion.  This
   function calls end_sequence () to end the current stream, and ensures
   that the instructions are unshared, recognizable non-jump insns.
   On failure, this function returns NULL_RTX.  */

static rtx_insn *
end_ifcvt_sequence (struct noce_if_info *if_info)
{
  rtx_insn *insn;
  rtx_insn *seq = get_insns ();
  rtx cc = cc_in_cond (if_info->cond);

  set_used_flags (if_info->x);
  set_used_flags (if_info->cond);
  set_used_flags (if_info->a);
  set_used_flags (if_info->b);
  unshare_all_rtl_in_chain (seq);
  end_sequence ();

  /* Make sure that all of the instructions emitted are recognizable,
     and that we haven't introduced a new jump instruction.
     As an exercise for the reader, build a general mechanism that
     allows proper placement of required clobbers.  */
  for (insn = seq; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
        || recog_memoized (insn) == -1
        /* Make sure new generated code does not clobber CC.  */
        || (cc && set_of (cc, insn)))
      return NULL;

  return seq;
}
1057 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
1058 "if (a == b) x = a; else x = b" into "x = b". */
1060 static int
1061 noce_try_move (struct noce_if_info *if_info)
1063 rtx cond = if_info->cond;
1064 enum rtx_code code = GET_CODE (cond);
1065 rtx y;
1066 rtx_insn *seq;
1068 if (code != NE && code != EQ)
1069 return FALSE;
1071 /* This optimization isn't valid if either A or B could be a NaN
1072 or a signed zero. */
1073 if (HONOR_NANS (if_info->x)
1074 || HONOR_SIGNED_ZEROS (if_info->x))
1075 return FALSE;
1077 /* Check whether the operands of the comparison are A and in
1078 either order. */
1079 if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
1080 && rtx_equal_p (if_info->b, XEXP (cond, 1)))
1081 || (rtx_equal_p (if_info->a, XEXP (cond, 1))
1082 && rtx_equal_p (if_info->b, XEXP (cond, 0))))
1084 if (!rtx_interchangeable_p (if_info->a, if_info->b))
1085 return FALSE;
1087 y = (code == EQ) ? if_info->a : if_info->b;
1089 /* Avoid generating the move if the source is the destination. */
1090 if (! rtx_equal_p (if_info->x, y))
1092 start_sequence ();
1093 noce_emit_move_insn (if_info->x, y);
1094 seq = end_ifcvt_sequence (if_info);
1095 if (!seq)
1096 return FALSE;
1098 emit_insn_before_setloc (seq, if_info->jump,
1099 INSN_LOCATION (if_info->insn_a));
1101 return TRUE;
1103 return FALSE;
1106 /* Convert "if (test) x = 1; else x = 0".
1108 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
1109 tried in noce_try_store_flag_constants after noce_try_cmove has had
1110 a go at the conversion. */
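
/* For instance, on a target where STORE_FLAG_VALUE is 1, this turns
   "if (x < y) a = 1; else a = 0" into the single store-flag insn
   "a = (x < y)", reversing the comparison instead when the roles of
   A and B are swapped and the condition is reversible.  */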
static int
noce_try_store_flag (struct noce_if_info *if_info)
{
  int reversep;
  rtx target;
  rtx_insn *seq;

  if (CONST_INT_P (if_info->b)
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
      && if_info->a == const0_rtx)
    reversep = 0;
  else if (if_info->b == const0_rtx
           && CONST_INT_P (if_info->a)
           && INTVAL (if_info->a) == STORE_FLAG_VALUE
           && (reversed_comparison_code (if_info->cond, if_info->jump)
               != UNKNOWN))
    reversep = 1;
  else
    return FALSE;

  start_sequence ();

  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
  if (target)
    {
      if (target != if_info->x)
        noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (! seq)
        return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
                               INSN_LOCATION (if_info->insn_a));
      return TRUE;
    }
  else
    {
      end_sequence ();
      return FALSE;
    }
}
1155 /* Convert "if (test) x = a; else x = b", for A and B constant. */
1157 static int
1158 noce_try_store_flag_constants (struct noce_if_info *if_info)
1160 rtx target;
1161 rtx_insn *seq;
1162 int reversep;
1163 HOST_WIDE_INT itrue, ifalse, diff, tmp;
1164 int normalize, can_reverse;
1165 machine_mode mode;
1167 if (CONST_INT_P (if_info->a)
1168 && CONST_INT_P (if_info->b))
1170 mode = GET_MODE (if_info->x);
1171 ifalse = INTVAL (if_info->a);
1172 itrue = INTVAL (if_info->b);
1174 diff = (unsigned HOST_WIDE_INT) itrue - ifalse;
1175 /* Make sure we can represent the difference between the two values. */
1176 if ((diff > 0)
1177 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
1178 return FALSE;
1180 diff = trunc_int_for_mode (diff, mode);
1182 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
1183 != UNKNOWN);
1185 reversep = 0;
1186 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
1187 normalize = 0;
1188 else if (ifalse == 0 && exact_log2 (itrue) >= 0
1189 && (STORE_FLAG_VALUE == 1
1190 || if_info->branch_cost >= 2))
1191 normalize = 1;
1192 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
1193 && (STORE_FLAG_VALUE == 1 || if_info->branch_cost >= 2))
1194 normalize = 1, reversep = 1;
1195 else if (itrue == -1
1196 && (STORE_FLAG_VALUE == -1
1197 || if_info->branch_cost >= 2))
1198 normalize = -1;
1199 else if (ifalse == -1 && can_reverse
1200 && (STORE_FLAG_VALUE == -1 || if_info->branch_cost >= 2))
1201 normalize = -1, reversep = 1;
1202 else if ((if_info->branch_cost >= 2 && STORE_FLAG_VALUE == -1)
1203 || if_info->branch_cost >= 3)
1204 normalize = -1;
1205 else
1206 return FALSE;
1208 if (reversep)
1210 std::swap (itrue, ifalse);
1211 diff = trunc_int_for_mode (-(unsigned HOST_WIDE_INT) diff, mode);
1214 start_sequence ();
1215 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
1216 if (! target)
1218 end_sequence ();
1219 return FALSE;
1222 /* if (test) x = 3; else x = 4;
1223 => x = 3 + (test == 0); */
1224 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
1226 target = expand_simple_binop (mode,
1227 (diff == STORE_FLAG_VALUE
1228 ? PLUS : MINUS),
1229 gen_int_mode (ifalse, mode), target,
1230 if_info->x, 0, OPTAB_WIDEN);
1233 /* if (test) x = 8; else x = 0;
1234 => x = (test != 0) << 3; */
1235 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
1237 target = expand_simple_binop (mode, ASHIFT,
1238 target, GEN_INT (tmp), if_info->x, 0,
1239 OPTAB_WIDEN);
1242 /* if (test) x = -1; else x = b;
1243 => x = -(test != 0) | b; */
1244 else if (itrue == -1)
1246 target = expand_simple_binop (mode, IOR,
1247 target, gen_int_mode (ifalse, mode),
1248 if_info->x, 0, OPTAB_WIDEN);
1251 /* if (test) x = a; else x = b;
1252 => x = (-(test != 0) & (b - a)) + a; */
1253 else
1255 target = expand_simple_binop (mode, AND,
1256 target, gen_int_mode (diff, mode),
1257 if_info->x, 0, OPTAB_WIDEN);
1258 if (target)
1259 target = expand_simple_binop (mode, PLUS,
1260 target, gen_int_mode (ifalse, mode),
1261 if_info->x, 0, OPTAB_WIDEN);
1264 if (! target)
1266 end_sequence ();
1267 return FALSE;
1270 if (target != if_info->x)
1271 noce_emit_move_insn (if_info->x, target);
1273 seq = end_ifcvt_sequence (if_info);
1274 if (!seq)
1275 return FALSE;
1277 emit_insn_before_setloc (seq, if_info->jump,
1278 INSN_LOCATION (if_info->insn_a));
1279 return TRUE;
1282 return FALSE;
1285 /* Convert "if (test) foo++" into "foo += (test != 0)", and
1286 similarly for "foo--". */
1288 static int
1289 noce_try_addcc (struct noce_if_info *if_info)
1291 rtx target;
1292 rtx_insn *seq;
1293 int subtract, normalize;
1295 if (GET_CODE (if_info->a) == PLUS
1296 && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
1297 && (reversed_comparison_code (if_info->cond, if_info->jump)
1298 != UNKNOWN))
1300 rtx cond = if_info->cond;
1301 enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
1303 /* First try to use addcc pattern. */
1304 if (general_operand (XEXP (cond, 0), VOIDmode)
1305 && general_operand (XEXP (cond, 1), VOIDmode))
1307 start_sequence ();
1308 target = emit_conditional_add (if_info->x, code,
1309 XEXP (cond, 0),
1310 XEXP (cond, 1),
1311 VOIDmode,
1312 if_info->b,
1313 XEXP (if_info->a, 1),
1314 GET_MODE (if_info->x),
1315 (code == LTU || code == GEU
1316 || code == LEU || code == GTU));
1317 if (target)
1319 if (target != if_info->x)
1320 noce_emit_move_insn (if_info->x, target);
1322 seq = end_ifcvt_sequence (if_info);
1323 if (!seq)
1324 return FALSE;
1326 emit_insn_before_setloc (seq, if_info->jump,
1327 INSN_LOCATION (if_info->insn_a));
1328 return TRUE;
1330 end_sequence ();
1333 /* If that fails, construct conditional increment or decrement using
1334 setcc. */
1335 if (if_info->branch_cost >= 2
1336 && (XEXP (if_info->a, 1) == const1_rtx
1337 || XEXP (if_info->a, 1) == constm1_rtx))
1339 start_sequence ();
1340 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1341 subtract = 0, normalize = 0;
1342 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1343 subtract = 1, normalize = 0;
1344 else
1345 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
1348 target = noce_emit_store_flag (if_info,
1349 gen_reg_rtx (GET_MODE (if_info->x)),
1350 1, normalize);
1352 if (target)
1353 target = expand_simple_binop (GET_MODE (if_info->x),
1354 subtract ? MINUS : PLUS,
1355 if_info->b, target, if_info->x,
1356 0, OPTAB_WIDEN);
1357 if (target)
1359 if (target != if_info->x)
1360 noce_emit_move_insn (if_info->x, target);
1362 seq = end_ifcvt_sequence (if_info);
1363 if (!seq)
1364 return FALSE;
1366 emit_insn_before_setloc (seq, if_info->jump,
1367 INSN_LOCATION (if_info->insn_a));
1368 return TRUE;
1370 end_sequence ();
1374 return FALSE;
1377 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
1379 static int
1380 noce_try_store_flag_mask (struct noce_if_info *if_info)
1382 rtx target;
1383 rtx_insn *seq;
1384 int reversep;
1386 reversep = 0;
1387 if ((if_info->branch_cost >= 2
1388 || STORE_FLAG_VALUE == -1)
1389 && ((if_info->a == const0_rtx
1390 && rtx_equal_p (if_info->b, if_info->x))
1391 || ((reversep = (reversed_comparison_code (if_info->cond,
1392 if_info->jump)
1393 != UNKNOWN))
1394 && if_info->b == const0_rtx
1395 && rtx_equal_p (if_info->a, if_info->x))))
1397 start_sequence ();
1398 target = noce_emit_store_flag (if_info,
1399 gen_reg_rtx (GET_MODE (if_info->x)),
1400 reversep, -1);
1401 if (target)
1402 target = expand_simple_binop (GET_MODE (if_info->x), AND,
1403 if_info->x,
1404 target, if_info->x, 0,
1405 OPTAB_WIDEN);
1407 if (target)
1409 int old_cost, new_cost, insn_cost;
1410 int speed_p;
1412 if (target != if_info->x)
1413 noce_emit_move_insn (if_info->x, target);
1415 seq = end_ifcvt_sequence (if_info);
1416 if (!seq)
1417 return FALSE;
1419 speed_p = optimize_bb_for_speed_p (BLOCK_FOR_INSN (if_info->insn_a));
1420 insn_cost = insn_rtx_cost (PATTERN (if_info->insn_a), speed_p);
1421 old_cost = COSTS_N_INSNS (if_info->branch_cost) + insn_cost;
1422 new_cost = seq_cost (seq, speed_p);
1424 if (new_cost > old_cost)
1425 return FALSE;
1427 emit_insn_before_setloc (seq, if_info->jump,
1428 INSN_LOCATION (if_info->insn_a));
1429 return TRUE;
1432 end_sequence ();
1435 return FALSE;
/* Helper function for noce_try_cmove and noce_try_cmove_arith.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
                 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  rtx target ATTRIBUTE_UNUSED;
  int unsignedp ATTRIBUTE_UNUSED;

  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx cond = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      rtx if_then_else = gen_rtx_IF_THEN_ELSE (GET_MODE (x),
                                               cond, vtrue, vfalse);
      rtx set = gen_rtx_SET (x, if_then_else);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      if (recog_memoized (insn) >= 0)
        {
          rtx_insn *seq = get_insns ();
          end_sequence ();
          emit_insn (seq);

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird, unless the
     target supports cbranchcc4 and the comparison is against the CC
     register.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    {
      if (!(HAVE_cbranchcc4)
          || GET_MODE_CLASS (GET_MODE (cmp_a)) != MODE_CC
          || cmp_b != const0_rtx)
        return NULL_RTX;
    }

  unsignedp = (code == LTU || code == GEU
               || code == LEU || code == GTU);

  target = emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
                                  vtrue, vfalse, GET_MODE (x),
                                  unsignedp);
  if (target)
    return target;

  /* We might be faced with a situation like:

     x = (reg:M TARGET)
     vtrue = (subreg:M (reg:N VTRUE) BYTE)
     vfalse = (subreg:M (reg:N VFALSE) BYTE)

     We can't do a conditional move in mode M, but it's possible that we
     could do a conditional move in mode N instead and take a subreg of
     the result.

     If we can't create new pseudos, though, don't bother.  */
  if (reload_completed)
    return NULL_RTX;

  if (GET_CODE (vtrue) == SUBREG && GET_CODE (vfalse) == SUBREG)
    {
      rtx reg_vtrue = SUBREG_REG (vtrue);
      rtx reg_vfalse = SUBREG_REG (vfalse);
      unsigned int byte_vtrue = SUBREG_BYTE (vtrue);
      unsigned int byte_vfalse = SUBREG_BYTE (vfalse);
      rtx promoted_target;

      if (GET_MODE (reg_vtrue) != GET_MODE (reg_vfalse)
          || byte_vtrue != byte_vfalse
          || (SUBREG_PROMOTED_VAR_P (vtrue)
              != SUBREG_PROMOTED_VAR_P (vfalse))
          || (SUBREG_PROMOTED_GET (vtrue)
              != SUBREG_PROMOTED_GET (vfalse)))
        return NULL_RTX;

      promoted_target = gen_reg_rtx (GET_MODE (reg_vtrue));

      target = emit_conditional_move (promoted_target, code, cmp_a, cmp_b,
                                      VOIDmode, reg_vtrue, reg_vfalse,
                                      GET_MODE (reg_vtrue), unsignedp);
      /* Nope, couldn't do it in that mode either.  */
      if (!target)
        return NULL_RTX;

      target = gen_rtx_SUBREG (GET_MODE (vtrue), promoted_target, byte_vtrue);
      SUBREG_PROMOTED_VAR_P (target) = SUBREG_PROMOTED_VAR_P (vtrue);
      SUBREG_PROMOTED_SET (target, SUBREG_PROMOTED_GET (vtrue));
      emit_move_insn (x, target);
      return x;
    }
  else
    return NULL_RTX;
}
/* Try only simple constants and registers here.  More complex cases
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
   has had a go at it.  */
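
/* For instance, "if (test) x = a; else x = b" with A and B in registers
   or simple constants can become a single conditional-move insn on
   targets that provide one, with no store-flag materialization at all.  */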
static int
noce_try_cmove (struct noce_if_info *if_info)
{
  enum rtx_code code;
  rtx target;
  rtx_insn *seq;

  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
    {
      start_sequence ();

      code = GET_CODE (if_info->cond);
      target = noce_emit_cmove (if_info, if_info->x, code,
                                XEXP (if_info->cond, 0),
                                XEXP (if_info->cond, 1),
                                if_info->a, if_info->b);

      if (target)
        {
          if (target != if_info->x)
            noce_emit_move_insn (if_info->x, target);

          seq = end_ifcvt_sequence (if_info);
          if (!seq)
            return FALSE;

          emit_insn_before_setloc (seq, if_info->jump,
                                   INSN_LOCATION (if_info->insn_a));
          return TRUE;
        }
      else
        {
          end_sequence ();
          return FALSE;
        }
    }

  return FALSE;
}
/* Try more complex cases involving conditional_move.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx_insn *insn_a, *insn_b;
  rtx target;
  int is_mem = 0;
  int insn_cost;
  enum rtx_code code;
  rtx_insn *ifcvt_seq;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  /* ??? FIXME: Magic number 5.  */
  if (cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b)
      && if_info->branch_cost >= 5)
    {
      machine_mode address_mode = get_address_mode (a);

      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (address_mode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not trap or fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_or_fault_p (a) || may_trap_or_fault_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
        if (test)
          x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Total insn_rtx_cost should be smaller than branch cost.  Exit
     if insn_rtx_cost can't be estimated.  */
  if (insn_a)
    {
      insn_cost
        = insn_rtx_cost (PATTERN (insn_a),
                         optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_a)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }
  else
    insn_cost = 0;

  if (insn_b)
    {
      insn_cost
        += insn_rtx_cost (PATTERN (insn_b),
                          optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_b)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
        reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
        reversep = 1;

      if (reversep)
        {
          code = reversed_comparison_code (if_info->cond, if_info->jump);
          std::swap (a, b);
          std::swap (insn_a, insn_b);
        }
    }

  start_sequence ();

  orig_a = a;
  orig_b = b;

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx_insn *insn;

      if (is_mem)
        {
          rtx reg = gen_reg_rtx (GET_MODE (a));
          insn = emit_insn (gen_rtx_SET (reg, a));
        }
      else if (! insn_a)
        goto end_seq_and_fail;
      else
        {
          a = gen_reg_rtx (GET_MODE (a));
          rtx_insn *copy_of_a = as_a <rtx_insn *> (copy_rtx (insn_a));
          rtx set = single_set (copy_of_a);
          SET_DEST (set) = a;
          insn = emit_insn (PATTERN (copy_of_a));
        }
      if (recog_memoized (insn) < 0)
        goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx pat;
      rtx_insn *last;
      rtx_insn *new_insn;

      if (is_mem)
        {
          rtx reg = gen_reg_rtx (GET_MODE (b));
          pat = gen_rtx_SET (reg, b);
        }
      else if (! insn_b)
        goto end_seq_and_fail;
      else
        {
          b = gen_reg_rtx (GET_MODE (b));
          rtx_insn *copy_of_insn_b = as_a <rtx_insn *> (copy_rtx (insn_b));
          rtx set = single_set (copy_of_insn_b);
          SET_DEST (set) = b;
          pat = PATTERN (copy_of_insn_b);
        }

      /* If insn to set up A clobbers any registers B depends on, try to
         swap insn that sets up A with the one that sets up B.  If even
         that doesn't help, punt.  */
      last = get_last_insn ();
      if (last && modified_in_p (orig_b, last))
        {
          new_insn = emit_insn_before (pat, get_insns ());
          if (modified_in_p (orig_a, new_insn))
            goto end_seq_and_fail;
        }
      else
        new_insn = emit_insn (pat);

      if (recog_memoized (new_insn) < 0)
        goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
                            XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory operand from above, emit the load now.  */
  if (is_mem)
    {
      rtx mem = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
        MEM_VOLATILE_P (mem) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
        set_mem_alias_set (mem, MEM_ALIAS_SET (if_info->a));
      set_mem_align (mem,
                     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
      set_mem_addr_space (mem, MEM_ADDR_SPACE (if_info->a));

      noce_emit_move_insn (if_info->x, mem);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  ifcvt_seq = end_ifcvt_sequence (if_info);
  if (!ifcvt_seq)
    return FALSE;

  emit_insn_before_setloc (ifcvt_seq, if_info->jump,
                           INSN_LOCATION (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}
/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.  */
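
/* For example, if the branch condition was canonicalized from
   "if (x < 4)" into "if (x <= 3)" but the caller needs the constant 4,
   the code below rewrites (le x 3) back into (lt x 4).  */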
static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
                        rtx_insn **earliest)
{
  rtx cond, set;
  rtx_insn *insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump);
  if (if_info->then_else_reversed)
    reverse = !reverse;

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
        replacing "if x < 4" with "if x <= 3".  We can undo that (or
        make equivalent types of changes) to get the constants we need
        if they're off by one in the right direction.  */

  if (CONST_INT_P (target))
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx_insn *prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
      if (prev_insn
          && BLOCK_FOR_INSN (prev_insn)
             == BLOCK_FOR_INSN (if_info->cond_earliest)
          && INSN_P (prev_insn)
          && GET_CODE (PATTERN (prev_insn)) == SET)
        {
          rtx src = find_reg_equal_equiv_note (prev_insn);
          if (!src)
            src = SET_SRC (PATTERN (prev_insn));
          if (CONST_INT_P (src))
            {
              if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
                op_a = src;
              else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
                op_b = src;

              if (CONST_INT_P (op_a))
                {
                  std::swap (op_a, op_b);
                  code = swap_condition (code);
                }
            }
        }

      /* Now, look to see if we can get the right constant by
         adjusting the conditional.  */
      if (CONST_INT_P (op_b))
        {
          HOST_WIDE_INT desired_val = INTVAL (target);
          HOST_WIDE_INT actual_val = INTVAL (op_b);

          switch (code)
            {
            case LT:
              if (actual_val == desired_val + 1)
                {
                  code = LE;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case LE:
              if (actual_val == desired_val - 1)
                {
                  code = LT;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case GT:
              if (actual_val == desired_val - 1)
                {
                  code = GE;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case GE:
              if (actual_val == desired_val + 1)
                {
                  code = GT;
                  op_b = GEN_INT (desired_val);
                }
              break;
            default:
              break;
            }
        }

      /* If we made any changes, generate a new conditional that is
         equivalent to what we started with, but has the right
         constants in it.  */
      if (code != GET_CODE (if_info->cond)
          || op_a != XEXP (if_info->cond, 0)
          || op_b != XEXP (if_info->cond, 1))
        {
          cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
          *earliest = if_info->cond_earliest;
          return cond;
        }
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
                                 earliest, target, HAVE_cbranchcc4, true);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (modified_in_p (if_info->a, insn)
            || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
1940 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1942 static int
1943 noce_try_minmax (struct noce_if_info *if_info)
1945 rtx cond, target;
1946 rtx_insn *earliest, *seq;
1947 enum rtx_code code, op;
1948 int unsignedp;
1950 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1951 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1952 to get the target to tell us... */
1953 if (HONOR_SIGNED_ZEROS (if_info->x)
1954 || HONOR_NANS (if_info->x))
1955 return FALSE;
1957 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1958 if (!cond)
1959 return FALSE;
1961 /* Verify the condition is of the form we expect, and canonicalize
1962 the comparison code. */
1963 code = GET_CODE (cond);
1964 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1966 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1967 return FALSE;
1969 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1971 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1972 return FALSE;
1973 code = swap_condition (code);
1975 else
1976 return FALSE;
1978 /* Determine what sort of operation this is. Note that the code is for
1979 a taken branch, so the code->operation mapping appears backwards. */
1980 switch (code)
1982 case LT:
1983 case LE:
1984 case UNLT:
1985 case UNLE:
1986 op = SMAX;
1987 unsignedp = 0;
1988 break;
1989 case GT:
1990 case GE:
1991 case UNGT:
1992 case UNGE:
1993 op = SMIN;
1994 unsignedp = 0;
1995 break;
1996 case LTU:
1997 case LEU:
1998 op = UMAX;
1999 unsignedp = 1;
2000 break;
2001 case GTU:
2002 case GEU:
2003 op = UMIN;
2004 unsignedp = 1;
2005 break;
2006 default:
2007 return FALSE;
2010 start_sequence ();
2012 target = expand_simple_binop (GET_MODE (if_info->x), op,
2013 if_info->a, if_info->b,
2014 if_info->x, unsignedp, OPTAB_WIDEN);
2015 if (! target)
2017 end_sequence ();
2018 return FALSE;
2020 if (target != if_info->x)
2021 noce_emit_move_insn (if_info->x, target);
2023 seq = end_ifcvt_sequence (if_info);
2024 if (!seq)
2025 return FALSE;
2027 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2028 if_info->cond = cond;
2029 if_info->cond_earliest = earliest;
2031 return TRUE;
2034 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
2035 "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
2036 etc. */
2038 static int
2039 noce_try_abs (struct noce_if_info *if_info)
2041 rtx cond, target, a, b, c;
2042 rtx_insn *earliest, *seq;
2043 int negate;
2044 bool one_cmpl = false;
2046 /* Reject modes with signed zeros. */
2047 if (HONOR_SIGNED_ZEROS (if_info->x))
2048 return FALSE;
2050 /* Recognize A and B as constituting an ABS or NABS. The canonical
2051 form is a branch around the negation, taken when the object is the
2052 first operand of a comparison against 0 that evaluates to true. */
2053 a = if_info->a;
2054 b = if_info->b;
2055 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
2056 negate = 0;
2057 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
2059 std::swap (a, b);
2060 negate = 1;
2062 else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
2064 negate = 0;
2065 one_cmpl = true;
2067 else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
2069 std::swap (a, b);
2070 negate = 1;
2071 one_cmpl = true;
2073 else
2074 return FALSE;
2076 cond = noce_get_alt_condition (if_info, b, &earliest);
2077 if (!cond)
2078 return FALSE;
2080 /* Verify the condition is of the form we expect. */
2081 if (rtx_equal_p (XEXP (cond, 0), b))
2082 c = XEXP (cond, 1);
2083 else if (rtx_equal_p (XEXP (cond, 1), b))
2085 c = XEXP (cond, 0);
2086 negate = !negate;
2088 else
2089 return FALSE;
2091 /* Verify that C is zero. Search one step backward for a
2092 REG_EQUAL note or a simple source if necessary. */
2093 if (REG_P (c))
2095 rtx set;
2096 rtx_insn *insn = prev_nonnote_insn (earliest);
2097 if (insn
2098 && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
2099 && (set = single_set (insn))
2100 && rtx_equal_p (SET_DEST (set), c))
2102 rtx note = find_reg_equal_equiv_note (insn);
2103 if (note)
2104 c = XEXP (note, 0);
2105 else
2106 c = SET_SRC (set);
2108 else
2109 return FALSE;
2111 if (MEM_P (c)
2112 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
2113 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
2114 c = get_pool_constant (XEXP (c, 0));
2116 /* Work around funny ideas get_condition has wrt canonicalization.
2117 Note that these rtx constants are known to be CONST_INT, and
2118 therefore imply integer comparisons. */
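/* E.g. get_condition can hand back "b > -1" for what was written as
   "b >= 0", or "b < 1" for "b <= 0"; both are still sign tests of B,
   so accept them here alongside a literal zero. */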
2119 if (c == constm1_rtx && GET_CODE (cond) == GT)
2121 else if (c == const1_rtx && GET_CODE (cond) == LT)
2123 else if (c != CONST0_RTX (GET_MODE (b)))
2124 return FALSE;
2126 /* Determine what sort of operation this is. */
2127 switch (GET_CODE (cond))
2129 case LT:
2130 case LE:
2131 case UNLT:
2132 case UNLE:
2133 negate = !negate;
2134 break;
2135 case GT:
2136 case GE:
2137 case UNGT:
2138 case UNGE:
2139 break;
2140 default:
2141 return FALSE;
2144 start_sequence ();
2145 if (one_cmpl)
2146 target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
2147 if_info->x);
2148 else
2149 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
2151 /* ??? It's a quandary whether cmove would be better here, especially
2152 for integers. Perhaps combine will clean things up. */
2153 if (target && negate)
2155 if (one_cmpl)
2156 target = expand_simple_unop (GET_MODE (target), NOT, target,
2157 if_info->x, 0);
2158 else
2159 target = expand_simple_unop (GET_MODE (target), NEG, target,
2160 if_info->x, 0);
2163 if (! target)
2165 end_sequence ();
2166 return FALSE;
2169 if (target != if_info->x)
2170 noce_emit_move_insn (if_info->x, target);
2172 seq = end_ifcvt_sequence (if_info);
2173 if (!seq)
2174 return FALSE;
2176 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2177 if_info->cond = cond;
2178 if_info->cond_earliest = earliest;
2180 return TRUE;
2183 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;". */
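/* The idea, sketched at the source level for a 32-bit M:

       x = m < 0 ? b : 0;   // becomes x = (m >> 31) & b;

   The arithmetic right shift smears the sign bit of M into a mask of
   all ones (M negative) or all zeros (M non-negative), which is then
   ANDed with B. */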
2185 static int
2186 noce_try_sign_mask (struct noce_if_info *if_info)
2188 rtx cond, t, m, c;
2189 rtx_insn *seq;
2190 machine_mode mode;
2191 enum rtx_code code;
2192 bool t_unconditional;
2194 cond = if_info->cond;
2195 code = GET_CODE (cond);
2196 m = XEXP (cond, 0);
2197 c = XEXP (cond, 1);
2199 t = NULL_RTX;
2200 if (if_info->a == const0_rtx)
2202 if ((code == LT && c == const0_rtx)
2203 || (code == LE && c == constm1_rtx))
2204 t = if_info->b;
2206 else if (if_info->b == const0_rtx)
2208 if ((code == GE && c == const0_rtx)
2209 || (code == GT && c == constm1_rtx))
2210 t = if_info->a;
2213 if (! t || side_effects_p (t))
2214 return FALSE;
2216 /* We currently don't handle different modes. */
2217 mode = GET_MODE (t);
2218 if (GET_MODE (m) != mode)
2219 return FALSE;
2221 /* This is only profitable if T is unconditionally executed/evaluated in the
2222 original insn sequence or T is cheap. The former happens if B is the
2223 non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
2224 INSN_B, which can happen for e.g. conditional stores to memory. For the
2225 cost computation use the block TEST_BB where the evaluation will end up
2226 after the transformation. */
2227 t_unconditional =
2228 (t == if_info->b
2229 && (if_info->insn_b == NULL_RTX
2230 || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
2231 if (!(t_unconditional
2232 || (set_src_cost (t, mode, optimize_bb_for_speed_p (if_info->test_bb))
2233 < COSTS_N_INSNS (2))))
2234 return FALSE;
2236 start_sequence ();
2237 /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
2238 "(signed) m >> 31" directly. This benefits targets with specialized
2239 insns to obtain the signmask, but still uses ashr_optab otherwise. */
2240 m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
2241 t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
2242 : NULL_RTX;
2244 if (!t)
2246 end_sequence ();
2247 return FALSE;
2250 noce_emit_move_insn (if_info->x, t);
2252 seq = end_ifcvt_sequence (if_info);
2253 if (!seq)
2254 return FALSE;
2256 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2257 return TRUE;
2261 /* Optimize away "if (x & C) x |= C" and similar bit manipulation
2262 transformations. */
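/* Illustrative source forms, using bit 2 (C == 4) as an example:

       if (x & 4) x |= 4;      // no-op: the whole statement goes away
       if (!(x & 4)) x |= 4;   // becomes plain "x |= 4;"
       if (x & 4) x ^= 4;      // becomes "x &= ~4;"

   The single-bit test appears below as a ZERO_EXTRACT of X. */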
2264 static int
2265 noce_try_bitop (struct noce_if_info *if_info)
2267 rtx cond, x, a, result;
2268 rtx_insn *seq;
2269 machine_mode mode;
2270 enum rtx_code code;
2271 int bitnum;
2273 x = if_info->x;
2274 cond = if_info->cond;
2275 code = GET_CODE (cond);
2277 /* Check for no else condition. */
2278 if (! rtx_equal_p (x, if_info->b))
2279 return FALSE;
2281 /* Check for a suitable condition. */
2282 if (code != NE && code != EQ)
2283 return FALSE;
2284 if (XEXP (cond, 1) != const0_rtx)
2285 return FALSE;
2286 cond = XEXP (cond, 0);
2288 /* ??? We could also handle AND here. */
2289 if (GET_CODE (cond) == ZERO_EXTRACT)
2291 if (XEXP (cond, 1) != const1_rtx
2292 || !CONST_INT_P (XEXP (cond, 2))
2293 || ! rtx_equal_p (x, XEXP (cond, 0)))
2294 return FALSE;
2295 bitnum = INTVAL (XEXP (cond, 2));
2296 mode = GET_MODE (x);
2297 if (BITS_BIG_ENDIAN)
2298 bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
2299 if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
2300 return FALSE;
2302 else
2303 return FALSE;
2305 a = if_info->a;
2306 if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
2308 /* Check for "if (X & C) x = x op C". */
2309 if (! rtx_equal_p (x, XEXP (a, 0))
2310 || !CONST_INT_P (XEXP (a, 1))
2311 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2312 != (unsigned HOST_WIDE_INT) 1 << bitnum)
2313 return FALSE;
2315 /* if ((x & C) == 0) x |= C; is transformed to x |= C. */
2316 /* if ((x & C) != 0) x |= C; is transformed to nothing. */
2317 if (GET_CODE (a) == IOR)
2318 result = (code == NE) ? a : NULL_RTX;
2319 else if (code == NE)
2321 /* if ((x & C) == 0) x ^= C; is transformed to x |= C. */
2322 result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
2323 result = simplify_gen_binary (IOR, mode, x, result);
2325 else
2327 /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C. */
2328 result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
2329 result = simplify_gen_binary (AND, mode, x, result);
2332 else if (GET_CODE (a) == AND)
2334 /* Check for "if (X & C) x &= ~C". */
2335 if (! rtx_equal_p (x, XEXP (a, 0))
2336 || !CONST_INT_P (XEXP (a, 1))
2337 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2338 != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
2339 return FALSE;
2341 /* if ((x & C) == 0) x &= ~C; is transformed to nothing. */
2342 /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C. */
2343 result = (code == EQ) ? a : NULL_RTX;
2345 else
2346 return FALSE;
2348 if (result)
2350 start_sequence ();
2351 noce_emit_move_insn (x, result);
2352 seq = end_ifcvt_sequence (if_info);
2353 if (!seq)
2354 return FALSE;
2356 emit_insn_before_setloc (seq, if_info->jump,
2357 INSN_LOCATION (if_info->insn_a));
2359 return TRUE;
2363 /* Similar to get_condition, only the resulting condition must be
2364 valid at JUMP, instead of at EARLIEST.
2366 If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
2367 THEN block of the caller, and we have to reverse the condition. */
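/* An illustrative sketch (hypothetical source): with the THEN block as
   the fallthrough arm of "if (a < b) goto ELSE;", the result is
   (lt a b) -- the condition under which the branch is taken -- made
   valid at JUMP even if the comparison was computed earlier. */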
2369 static rtx
2370 noce_get_condition (rtx_insn *jump, rtx_insn **earliest, bool then_else_reversed)
2372 rtx cond, set, tmp;
2373 bool reverse;
2375 if (! any_condjump_p (jump))
2376 return NULL_RTX;
2378 set = pc_set (jump);
2380 /* If this branches to JUMP_LABEL when the condition is false,
2381 reverse the condition. */
2382 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
2383 && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump));
2385 /* We may have to reverse because the caller's if block is not canonical,
2386 i.e. the THEN block isn't the fallthrough block for the TEST block
2387 (see find_if_header). */
2388 if (then_else_reversed)
2389 reverse = !reverse;
2391 /* If the condition variable is a register and is MODE_INT, accept it. */
2393 cond = XEXP (SET_SRC (set), 0);
2394 tmp = XEXP (cond, 0);
2395 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT
2396 && (GET_MODE (tmp) != BImode
2397 || !targetm.small_register_classes_for_mode_p (BImode)))
2399 *earliest = jump;
2401 if (reverse)
2402 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2403 GET_MODE (cond), tmp, XEXP (cond, 1));
2404 return cond;
2407 /* Otherwise, fall back on canonicalize_condition to do the dirty
2408 work of manipulating MODE_CC values and COMPARE rtx codes. */
2409 tmp = canonicalize_condition (jump, cond, reverse, earliest,
2410 NULL_RTX, HAVE_cbranchcc4, true);
2412 /* We don't handle side-effects in the condition; doing so would require
2413 handling REG_INC notes and making sure no duplicate conditions are emitted. */
2414 if (tmp != NULL_RTX && side_effects_p (tmp))
2415 return NULL_RTX;
2417 return tmp;
2420 /* Return true if OP is ok for if-then-else processing. */
2422 static int
2423 noce_operand_ok (const_rtx op)
2425 if (side_effects_p (op))
2426 return FALSE;
2428 /* We special-case memories, so handle any of them with
2429 no address side effects. */
2430 if (MEM_P (op))
2431 return ! side_effects_p (XEXP (op, 0));
2433 return ! may_trap_p (op);
2436 /* Return true if a write into MEM may trap or fault. */
2438 static bool
2439 noce_mem_write_may_trap_or_fault_p (const_rtx mem)
2441 rtx addr;
2443 if (MEM_READONLY_P (mem))
2444 return true;
2446 if (may_trap_or_fault_p (mem))
2447 return true;
2449 addr = XEXP (mem, 0);
2451 /* Call target hook to avoid the effects of -fpic etc. */
2452 addr = targetm.delegitimize_address (addr);
2454 while (addr)
2455 switch (GET_CODE (addr))
2457 case CONST:
2458 case PRE_DEC:
2459 case PRE_INC:
2460 case POST_DEC:
2461 case POST_INC:
2462 case POST_MODIFY:
2463 addr = XEXP (addr, 0);
2464 break;
2465 case LO_SUM:
2466 case PRE_MODIFY:
2467 addr = XEXP (addr, 1);
2468 break;
2469 case PLUS:
2470 if (CONST_INT_P (XEXP (addr, 1)))
2471 addr = XEXP (addr, 0);
2472 else
2473 return false;
2474 break;
2475 case LABEL_REF:
2476 return true;
2477 case SYMBOL_REF:
2478 if (SYMBOL_REF_DECL (addr)
2479 && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
2480 return true;
2481 return false;
2482 default:
2483 return false;
2486 return false;
2489 /* Return whether we can use store speculation for MEM. TOP_BB is the
2490 basic block above the conditional block where we are considering
2491 doing the speculative store. We look for whether MEM is set
2492 unconditionally later in the function. */
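/* E.g. (illustrative):

       if (cond)
         *p = a;
       ...
       *p = b;   // unconditional store on every later path

   Here the conditional store may be issued unconditionally, since *p
   is written anyway; the scan below gives up if anything resembling a
   memory barrier (a volatile access or a non-const call) intervenes. */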
2494 static bool
2495 noce_can_store_speculate_p (basic_block top_bb, const_rtx mem)
2497 basic_block dominator;
2499 for (dominator = get_immediate_dominator (CDI_POST_DOMINATORS, top_bb);
2500 dominator != NULL;
2501 dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator))
2503 rtx_insn *insn;
2505 FOR_BB_INSNS (dominator, insn)
2507 /* If we see something that might be a memory barrier, we
2508 have to stop looking. Even if the MEM is set later in
2509 the function, we still don't want to set it
2510 unconditionally before the barrier. */
2511 if (INSN_P (insn)
2512 && (volatile_insn_p (PATTERN (insn))
2513 || (CALL_P (insn) && (!RTL_CONST_CALL_P (insn)))))
2514 return false;
2516 if (memory_must_be_modified_in_insn_p (mem, insn))
2517 return true;
2518 if (modified_in_p (XEXP (mem, 0), insn))
2519 return false;
2524 return false;
2527 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2528 it without using conditional execution. Return TRUE if we were successful
2529 at converting the block. */
2531 static int
2532 noce_process_if_block (struct noce_if_info *if_info)
2534 basic_block test_bb = if_info->test_bb; /* test block */
2535 basic_block then_bb = if_info->then_bb; /* THEN */
2536 basic_block else_bb = if_info->else_bb; /* ELSE or NULL */
2537 basic_block join_bb = if_info->join_bb; /* JOIN */
2538 rtx_insn *jump = if_info->jump;
2539 rtx cond = if_info->cond;
2540 rtx_insn *insn_a, *insn_b;
2541 rtx set_a, set_b;
2542 rtx orig_x, x, a, b;
2543 rtx cc;
2545 /* We're looking for patterns of the form
2547 (1) if (...) x = a; else x = b;
2548 (2) x = b; if (...) x = a;
2549 (3) if (...) x = a; // as if with an initial x = x.
2551 The latter two patterns require jumps to be more expensive.
2553 ??? For future expansion, look for multiple X in such patterns. */
2555 /* Look for one of the potential sets. */
2556 insn_a = first_active_insn (then_bb);
2557 if (! insn_a
2558 || insn_a != last_active_insn (then_bb, FALSE)
2559 || (set_a = single_set (insn_a)) == NULL_RTX)
2560 return FALSE;
2562 x = SET_DEST (set_a);
2563 a = SET_SRC (set_a);
2565 /* Look for the other potential set. Make sure we've got equivalent
2566 destinations. */
2567 /* ??? This is overconservative. Storing to two different mems is
2568 as easy as conditionally computing the address. Storing to a
2569 single mem merely requires a scratch memory to use as one of the
2570 destination addresses; often the memory immediately below the
2571 stack pointer is available for this. */
2572 set_b = NULL_RTX;
2573 if (else_bb)
2575 insn_b = first_active_insn (else_bb);
2576 if (! insn_b
2577 || insn_b != last_active_insn (else_bb, FALSE)
2578 || (set_b = single_set (insn_b)) == NULL_RTX
2579 || ! rtx_interchangeable_p (x, SET_DEST (set_b)))
2580 return FALSE;
2582 else
2584 insn_b = prev_nonnote_nondebug_insn (if_info->cond_earliest);
2585 /* We're going to be moving the evaluation of B down from above
2586 COND_EARLIEST to JUMP. Make sure the relevant data is still
2587 intact. */
2588 if (! insn_b
2589 || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
2590 || !NONJUMP_INSN_P (insn_b)
2591 || (set_b = single_set (insn_b)) == NULL_RTX
2592 || ! rtx_interchangeable_p (x, SET_DEST (set_b))
2593 || ! noce_operand_ok (SET_SRC (set_b))
2594 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
2595 || modified_between_p (SET_SRC (set_b), insn_b, jump)
2596 /* Avoid extending the lifetime of hard registers on small
2597 register class machines. */
2598 || (REG_P (SET_SRC (set_b))
2599 && HARD_REGISTER_P (SET_SRC (set_b))
2600 && targetm.small_register_classes_for_mode_p
2601 (GET_MODE (SET_SRC (set_b))))
2602 /* Likewise with X. In particular this can happen when
2603 noce_get_condition looks farther back in the instruction
2604 stream than one might expect. */
2605 || reg_overlap_mentioned_p (x, cond)
2606 || reg_overlap_mentioned_p (x, a)
2607 || modified_between_p (x, insn_b, jump))
2609 insn_b = NULL;
2610 set_b = NULL_RTX;
2614 /* If x has side effects then only the if-then-else form is safe to
2615 convert. But even in that case we would need to restore any notes
2616 (such as REG_INC) at the end. That can be tricky if
2617 noce_emit_move_insn expands to more than one insn, so disable the
2618 optimization entirely for now if there are side effects. */
2619 if (side_effects_p (x))
2620 return FALSE;
2622 b = (set_b ? SET_SRC (set_b) : x);
2624 /* Only operate on register destinations, and even then avoid extending
2625 the lifetime of hard registers on small register class machines. */
2626 orig_x = x;
2627 if (!REG_P (x)
2628 || (HARD_REGISTER_P (x)
2629 && targetm.small_register_classes_for_mode_p (GET_MODE (x))))
2631 if (GET_MODE (x) == BLKmode)
2632 return FALSE;
2634 if (GET_CODE (x) == ZERO_EXTRACT
2635 && (!CONST_INT_P (XEXP (x, 1))
2636 || !CONST_INT_P (XEXP (x, 2))))
2637 return FALSE;
2639 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
2640 ? XEXP (x, 0) : x));
2643 /* Don't operate on sources that may trap or are volatile. */
2644 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
2645 return FALSE;
2647 retry:
2648 /* Set up the info block for our subroutines. */
2649 if_info->insn_a = insn_a;
2650 if_info->insn_b = insn_b;
2651 if_info->x = x;
2652 if_info->a = a;
2653 if_info->b = b;
2655 /* Skip it if the instruction to be moved might clobber CC. */
2656 cc = cc_in_cond (cond);
2657 if (cc
2658 && (set_of (cc, insn_a)
2659 || (insn_b && set_of (cc, insn_b))))
2660 return FALSE;
2662 /* Try optimizations in some approximation of a useful order. */
2663 /* ??? Should first look to see if X is live incoming at all. If it
2664 isn't, we don't need anything but an unconditional set. */
2666 /* Look and see if A and B are really the same. Avoid creating silly
2667 cmove constructs that no one will fix up later. */
2668 if (rtx_interchangeable_p (a, b))
2670 /* If we have an INSN_B, we don't have to create any new rtl. Just
2671 move the instruction that we already have. If we don't have an
2672 INSN_B, that means that A == X, and we've got a noop move. In
2673 that case don't do anything and let the code below delete INSN_A. */
2674 if (insn_b && else_bb)
2676 rtx note;
2678 if (else_bb && insn_b == BB_END (else_bb))
2679 BB_END (else_bb) = PREV_INSN (insn_b);
2680 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
2682 /* If there was a REG_EQUAL note, delete it since it may have been
2683 true due to this insn being after a jump. */
2684 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
2685 remove_note (insn_b, note);
2687 insn_b = NULL;
2689 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
2690 x must be executed twice. */
2691 else if (insn_b && side_effects_p (orig_x))
2692 return FALSE;
2694 x = orig_x;
2695 goto success;
2698 if (!set_b && MEM_P (orig_x))
2700 /* Disallow the "if (...) x = a;" form (implicit "else x = x;")
2701 for optimizations if writing to x may trap or fault,
2702 i.e. it's a memory other than a static var or a stack slot,
2703 is misaligned on strict aligned machines or is read-only. If
2704 x is a read-only memory, then the program is valid only if we
2705 avoid the store into it. If there are stores on both the
2706 THEN and ELSE arms, then we can go ahead with the conversion;
2707 either the program is broken, or the condition is always
2708 false such that the other memory is selected. */
2709 if (noce_mem_write_may_trap_or_fault_p (orig_x))
2710 return FALSE;
2712 /* Avoid store speculation: given "if (...) x = a" where x is a
2713 MEM, we only want to do the store if x is always set
2714 somewhere in the function. This avoids cases like
2715 if (pthread_mutex_trylock(mutex))
2716 ++global_variable;
2717 where we only want global_variable to be changed if the mutex
2718 is held. FIXME: This should ideally be expressed directly in
2719 RTL somehow. */
2720 if (!noce_can_store_speculate_p (test_bb, orig_x))
2721 return FALSE;
2724 if (noce_try_move (if_info))
2725 goto success;
2726 if (noce_try_store_flag (if_info))
2727 goto success;
2728 if (noce_try_bitop (if_info))
2729 goto success;
2730 if (noce_try_minmax (if_info))
2731 goto success;
2732 if (noce_try_abs (if_info))
2733 goto success;
2734 if (HAVE_conditional_move
2735 && noce_try_cmove (if_info))
2736 goto success;
2737 if (! targetm.have_conditional_execution ())
2739 if (noce_try_store_flag_constants (if_info))
2740 goto success;
2741 if (noce_try_addcc (if_info))
2742 goto success;
2743 if (noce_try_store_flag_mask (if_info))
2744 goto success;
2745 if (HAVE_conditional_move
2746 && noce_try_cmove_arith (if_info))
2747 goto success;
2748 if (noce_try_sign_mask (if_info))
2749 goto success;
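/* If we matched "x = b; if (...) x = a;" (pattern 2 above) and nothing
   worked, forget the assignment to B found before the test and retry
   as the plain "if (...) x = a;" form with an implicit "else x = x". */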
2752 if (!else_bb && set_b)
2754 insn_b = NULL;
2755 set_b = NULL_RTX;
2756 b = orig_x;
2757 goto retry;
2760 return FALSE;
2762 success:
2764 /* If we used a temporary, fix it up now. */
2765 if (orig_x != x)
2767 rtx_insn *seq;
2769 start_sequence ();
2770 noce_emit_move_insn (orig_x, x);
2771 seq = get_insns ();
2772 set_used_flags (orig_x);
2773 unshare_all_rtl_in_chain (seq);
2774 end_sequence ();
2776 emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATION (insn_a));
2779 /* The original THEN and ELSE blocks may now be removed. The test block
2780 must now jump to the join block. If the test block and the join block
2781 can be merged, do so. */
2782 if (else_bb)
2784 delete_basic_block (else_bb);
2785 num_true_changes++;
2787 else
2788 remove_edge (find_edge (test_bb, join_bb));
2790 remove_edge (find_edge (then_bb, join_bb));
2791 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
2792 delete_basic_block (then_bb);
2793 num_true_changes++;
2795 if (can_merge_blocks_p (test_bb, join_bb))
2797 merge_blocks (test_bb, join_bb);
2798 num_true_changes++;
2801 num_updated_if_blocks++;
2802 return TRUE;
2805 /* Check whether a block is suitable for conditional move conversion.
2806 Every insn must be a simple set of a register to a constant or a
2807 register. For each assignment, store the value in the pointer map
2808 VALS, keyed by register pointer, then store the register
2809 pointer in REGS. COND is the condition we will test. */
2811 static int
2812 check_cond_move_block (basic_block bb,
2813 hash_map<rtx, rtx> *vals,
2814 vec<rtx> *regs,
2815 rtx cond)
2817 rtx_insn *insn;
2818 rtx cc = cc_in_cond (cond);
2820 /* We can only handle simple jumps at the end of the basic block.
2821 It is almost impossible to update the CFG otherwise. */
2822 insn = BB_END (bb);
2823 if (JUMP_P (insn) && !onlyjump_p (insn))
2824 return FALSE;
2826 FOR_BB_INSNS (bb, insn)
2828 rtx set, dest, src;
2830 if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
2831 continue;
2832 set = single_set (insn);
2833 if (!set)
2834 return FALSE;
2836 dest = SET_DEST (set);
2837 src = SET_SRC (set);
2838 if (!REG_P (dest)
2839 || (HARD_REGISTER_P (dest)
2840 && targetm.small_register_classes_for_mode_p (GET_MODE (dest))))
2841 return FALSE;
2843 if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
2844 return FALSE;
2846 if (side_effects_p (src) || side_effects_p (dest))
2847 return FALSE;
2849 if (may_trap_p (src) || may_trap_p (dest))
2850 return FALSE;
2852 /* Don't try to handle this if the source register was
2853 modified earlier in the block. */
2854 if ((REG_P (src)
2855 && vals->get (src))
2856 || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
2857 && vals->get (SUBREG_REG (src))))
2858 return FALSE;
2860 /* Don't try to handle this if the destination register was
2861 modified earlier in the block. */
2862 if (vals->get (dest))
2863 return FALSE;
2865 /* Don't try to handle this if the condition uses the
2866 destination register. */
2867 if (reg_overlap_mentioned_p (dest, cond))
2868 return FALSE;
2870 /* Don't try to handle this if the source register is modified
2871 later in the block. */
2872 if (!CONSTANT_P (src)
2873 && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
2874 return FALSE;
2876 /* Skip it if the instruction to be moved might clobber CC. */
2877 if (cc && set_of (cc, insn))
2878 return FALSE;
2880 vals->put (dest, src);
2882 regs->safe_push (dest);
2885 return TRUE;
2888 /* Given a basic block BB suitable for conditional move conversion,
2889 a condition COND, and pointer maps THEN_VALS and ELSE_VALS containing
2890 the register values depending on COND, emit the insns in the block as
2891 conditional moves. If ELSE_BLOCK is true, THEN_BB was already
2892 processed. The caller has started a sequence for the conversion.
2893 Return true if successful, false if something goes wrong. */
2895 static bool
2896 cond_move_convert_if_block (struct noce_if_info *if_infop,
2897 basic_block bb, rtx cond,
2898 hash_map<rtx, rtx> *then_vals,
2899 hash_map<rtx, rtx> *else_vals,
2900 bool else_block_p)
2902 enum rtx_code code;
2903 rtx_insn *insn;
2904 rtx cond_arg0, cond_arg1;
2906 code = GET_CODE (cond);
2907 cond_arg0 = XEXP (cond, 0);
2908 cond_arg1 = XEXP (cond, 1);
2910 FOR_BB_INSNS (bb, insn)
2912 rtx set, target, dest, t, e;
2914 /* ??? Maybe emit conditional debug insn? */
2915 if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
2916 continue;
2917 set = single_set (insn);
2918 gcc_assert (set && REG_P (SET_DEST (set)));
2920 dest = SET_DEST (set);
2922 rtx *then_slot = then_vals->get (dest);
2923 rtx *else_slot = else_vals->get (dest);
2924 t = then_slot ? *then_slot : NULL_RTX;
2925 e = else_slot ? *else_slot : NULL_RTX;
2927 if (else_block_p)
2929 /* If this register was set in the then block, we already
2930 handled this case there. */
2931 if (t)
2932 continue;
2933 t = dest;
2934 gcc_assert (e);
2936 else
2938 gcc_assert (t);
2939 if (!e)
2940 e = dest;
2943 target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
2944 t, e);
2945 if (!target)
2946 return false;
2948 if (target != dest)
2949 noce_emit_move_insn (dest, target);
2952 return true;
2955 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2956 it using only conditional moves. Return TRUE if we were successful at
2957 converting the block. */
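/* Illustrative sketch: a diamond such as

       if (c) { x = 1; y = 2; }
       else   { x = 3; }

   becomes branch-free code with one conditional move per register set
   in either arm:

       x = c ? 1 : 3;
       y = c ? 2 : y;  */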
2959 static int
2960 cond_move_process_if_block (struct noce_if_info *if_info)
2962 basic_block test_bb = if_info->test_bb;
2963 basic_block then_bb = if_info->then_bb;
2964 basic_block else_bb = if_info->else_bb;
2965 basic_block join_bb = if_info->join_bb;
2966 rtx_insn *jump = if_info->jump;
2967 rtx cond = if_info->cond;
2968 rtx_insn *seq, *loc_insn;
2969 rtx reg;
2970 int c;
2971 vec<rtx> then_regs = vNULL;
2972 vec<rtx> else_regs = vNULL;
2973 unsigned int i;
2974 int success_p = FALSE;
2976 /* Build a mapping for each block to the value used for each
2977 register. */
2978 hash_map<rtx, rtx> then_vals;
2979 hash_map<rtx, rtx> else_vals;
2981 /* Make sure the blocks are suitable. */
2982 if (!check_cond_move_block (then_bb, &then_vals, &then_regs, cond)
2983 || (else_bb
2984 && !check_cond_move_block (else_bb, &else_vals, &else_regs, cond)))
2985 goto done;
2987 /* Make sure the blocks can be used together. If the same register
2988 is set in both blocks, and is not set to a constant in both
2989 cases, then both blocks must set it to the same register. We
2990 have already verified that if it is set to a register, that the
2991 source register does not change after the assignment. Also count
2992 the number of registers set in only one of the blocks. */
2993 c = 0;
2994 FOR_EACH_VEC_ELT (then_regs, i, reg)
2996 rtx *then_slot = then_vals.get (reg);
2997 rtx *else_slot = else_vals.get (reg);
2999 gcc_checking_assert (then_slot);
3000 if (!else_slot)
3001 ++c;
3002 else
3004 rtx then_val = *then_slot;
3005 rtx else_val = *else_slot;
3006 if (!CONSTANT_P (then_val) && !CONSTANT_P (else_val)
3007 && !rtx_equal_p (then_val, else_val))
3008 goto done;
3012 /* Finish off c for MAX_CONDITIONAL_EXECUTE. */
3013 FOR_EACH_VEC_ELT (else_regs, i, reg)
3015 gcc_checking_assert (else_vals.get (reg));
3016 if (!then_vals.get (reg))
3017 ++c;
3020 /* Make sure it is reasonable to convert this block. What matters
3021 is the number of assignments currently made in only one of the
3022 branches, since if we convert we are going to always execute
3023 them. */
3024 if (c > MAX_CONDITIONAL_EXECUTE)
3025 goto done;
3027 /* Try to emit the conditional moves. First do the then block,
3028 then do anything left in the else block. */
3029 start_sequence ();
3030 if (!cond_move_convert_if_block (if_info, then_bb, cond,
3031 &then_vals, &else_vals, false)
3032 || (else_bb
3033 && !cond_move_convert_if_block (if_info, else_bb, cond,
3034 &then_vals, &else_vals, true)))
3036 end_sequence ();
3037 goto done;
3039 seq = end_ifcvt_sequence (if_info);
3040 if (!seq)
3041 goto done;
3043 loc_insn = first_active_insn (then_bb);
3044 if (!loc_insn)
3046 loc_insn = first_active_insn (else_bb);
3047 gcc_assert (loc_insn);
3049 emit_insn_before_setloc (seq, jump, INSN_LOCATION (loc_insn));
3051 if (else_bb)
3053 delete_basic_block (else_bb);
3054 num_true_changes++;
3056 else
3057 remove_edge (find_edge (test_bb, join_bb));
3059 remove_edge (find_edge (then_bb, join_bb));
3060 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
3061 delete_basic_block (then_bb);
3062 num_true_changes++;
3064 if (can_merge_blocks_p (test_bb, join_bb))
3066 merge_blocks (test_bb, join_bb);
3067 num_true_changes++;
3070 num_updated_if_blocks++;
3072 success_p = TRUE;
3074 done:
3075 then_regs.release ();
3076 else_regs.release ();
3077 return success_p;
3081 /* Determine if a given basic block heads a simple IF-THEN-JOIN or an
3082 IF-THEN-ELSE-JOIN block.
3084 If so, we'll try to convert the insns to not require the branch,
3085 using only transformations that do not require conditional execution.
3087 Return TRUE if we were successful at converting the block. */
3089 static int
3090 noce_find_if_block (basic_block test_bb, edge then_edge, edge else_edge,
3091 int pass)
3093 basic_block then_bb, else_bb, join_bb;
3094 bool then_else_reversed = false;
3095 rtx_insn *jump;
3096 rtx cond;
3097 rtx_insn *cond_earliest;
3098 struct noce_if_info if_info;
3100 /* We should only ever get here before reload. */
3101 gcc_assert (!reload_completed);
3103 /* Recognize an IF-THEN-ELSE-JOIN block. */
3104 if (single_pred_p (then_edge->dest)
3105 && single_succ_p (then_edge->dest)
3106 && single_pred_p (else_edge->dest)
3107 && single_succ_p (else_edge->dest)
3108 && single_succ (then_edge->dest) == single_succ (else_edge->dest))
3110 then_bb = then_edge->dest;
3111 else_bb = else_edge->dest;
3112 join_bb = single_succ (then_bb);
3114 /* Recognize an IF-THEN-JOIN block. */
3115 else if (single_pred_p (then_edge->dest)
3116 && single_succ_p (then_edge->dest)
3117 && single_succ (then_edge->dest) == else_edge->dest)
3119 then_bb = then_edge->dest;
3120 else_bb = NULL_BLOCK;
3121 join_bb = else_edge->dest;
3123 /* Recognize an IF-ELSE-JOIN block. We can have those because the order
3124 of basic blocks in cfglayout mode does not matter, so the fallthrough
3125 edge can go to any basic block (and not just to bb->next_bb, like in
3126 cfgrtl mode). */
3127 else if (single_pred_p (else_edge->dest)
3128 && single_succ_p (else_edge->dest)
3129 && single_succ (else_edge->dest) == then_edge->dest)
3131 /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
3132 To make this work, we have to invert the THEN and ELSE blocks
3133 and reverse the jump condition. */
3134 then_bb = else_edge->dest;
3135 else_bb = NULL_BLOCK;
3136 join_bb = single_succ (then_bb);
3137 then_else_reversed = true;
3139 else
3140 /* Not a form we can handle. */
3141 return FALSE;
3143 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
3144 if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
3145 return FALSE;
3146 if (else_bb
3147 && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
3148 return FALSE;
3150 num_possible_if_blocks++;
3152 if (dump_file)
3154 fprintf (dump_file,
3155 "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
3156 (else_bb) ? "-ELSE" : "",
3157 pass, test_bb->index, then_bb->index);
3159 if (else_bb)
3160 fprintf (dump_file, ", else %d", else_bb->index);
3162 fprintf (dump_file, ", join %d\n", join_bb->index);
3165 /* If the conditional jump is more than just a conditional
3166 jump, then we cannot do if-conversion on this block. */
3167 jump = BB_END (test_bb);
3168 if (! onlyjump_p (jump))
3169 return FALSE;
3171 /* If this is not a standard conditional jump, we can't parse it. */
3172 cond = noce_get_condition (jump, &cond_earliest, then_else_reversed);
3173 if (!cond)
3174 return FALSE;
3176 /* We must be comparing objects whose modes imply the size. */
3177 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
3178 return FALSE;
3180 /* Initialize an IF_INFO struct to pass around. */
3181 memset (&if_info, 0, sizeof if_info);
3182 if_info.test_bb = test_bb;
3183 if_info.then_bb = then_bb;
3184 if_info.else_bb = else_bb;
3185 if_info.join_bb = join_bb;
3186 if_info.cond = cond;
3187 if_info.cond_earliest = cond_earliest;
3188 if_info.jump = jump;
3189 if_info.then_else_reversed = then_else_reversed;
3190 if_info.branch_cost = BRANCH_COST (optimize_bb_for_speed_p (test_bb),
3191 predictable_edge_p (then_edge));
3193 /* Do the real work. */
3195 if (noce_process_if_block (&if_info))
3196 return TRUE;
3198 if (HAVE_conditional_move
3199 && cond_move_process_if_block (&if_info))
3200 return TRUE;
3202 return FALSE;
3206 /* Merge the blocks and mark for local life update. */
3208 static void
3209 merge_if_block (struct ce_if_block * ce_info)
3211 basic_block test_bb = ce_info->test_bb; /* last test block */
3212 basic_block then_bb = ce_info->then_bb; /* THEN */
3213 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
3214 basic_block join_bb = ce_info->join_bb; /* join block */
3215 basic_block combo_bb;
3217 /* All block merging is done into the lower block numbers. */
3219 combo_bb = test_bb;
3220 df_set_bb_dirty (test_bb);
3222 /* Merge any basic blocks to handle && and || subtests. Each of
3223 the blocks are on the fallthru path from the predecessor block. */
3224 if (ce_info->num_multiple_test_blocks > 0)
3226 basic_block bb = test_bb;
3227 basic_block last_test_bb = ce_info->last_test_bb;
3228 basic_block fallthru = block_fallthru (bb);
3232 bb = fallthru;
3233 fallthru = block_fallthru (bb);
3234 merge_blocks (combo_bb, bb);
3235 num_true_changes++;
3237 while (bb != last_test_bb);
3240 /* Merge TEST block into THEN block. Normally the THEN block won't have a
3241 label, but it might if there were || tests. That label's count should be
3242 zero, and it normally should be removed. */
3244 if (then_bb)
3246 /* If THEN_BB has no successors, then there's a BARRIER after it.
3247 If COMBO_BB has more than one successor (THEN_BB), then that BARRIER
3248 is no longer needed, and in fact it is incorrect to leave it in
3249 the insn stream. */
3250 if (EDGE_COUNT (then_bb->succs) == 0
3251 && EDGE_COUNT (combo_bb->succs) > 1)
3253 rtx_insn *end = NEXT_INSN (BB_END (then_bb));
3254 while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
3255 end = NEXT_INSN (end);
3257 if (end && BARRIER_P (end))
3258 delete_insn (end);
3260 merge_blocks (combo_bb, then_bb);
3261 num_true_changes++;
3264 /* The ELSE block, if it existed, had a label. That label count
3265 will almost always be zero, but odd things can happen when labels
3266 get their addresses taken. */
3267 if (else_bb)
3269 /* If ELSE_BB has no successors, then there's a BARRIER after it.
3270 If COMBO_BB has more than one successor (ELSE_BB), then that BARRIER
3271 is no longer needed, and in fact it is incorrect to leave it in
3272 the insn stream. */
3273 if (EDGE_COUNT (else_bb->succs) == 0
3274 && EDGE_COUNT (combo_bb->succs) > 1)
3276 rtx_insn *end = NEXT_INSN (BB_END (else_bb));
3277 while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
3278 end = NEXT_INSN (end);
3280 if (end && BARRIER_P (end))
3281 delete_insn (end);
3283 merge_blocks (combo_bb, else_bb);
3284 num_true_changes++;
3287 /* If there was no join block reported, that means it was not adjacent
3288 to the others, and so we cannot merge them. */
3290 if (! join_bb)
3292 rtx_insn *last = BB_END (combo_bb);
3294 /* The outgoing edge for the current COMBO block should already
3295 be correct. Verify this. */
3296 if (EDGE_COUNT (combo_bb->succs) == 0)
3297 gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
3298 || (NONJUMP_INSN_P (last)
3299 && GET_CODE (PATTERN (last)) == TRAP_IF
3300 && (TRAP_CONDITION (PATTERN (last))
3301 == const_true_rtx)));
3303 else
3304 /* There should still be something at the end of the THEN or ELSE
3305 blocks taking us to our final destination. */
3306 gcc_assert (JUMP_P (last)
3307 || (EDGE_SUCC (combo_bb, 0)->dest
3308 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3309 && CALL_P (last)
3310 && SIBLING_CALL_P (last))
3311 || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
3312 && can_throw_internal (last)));
3315 /* The JOIN block may have had quite a number of other predecessors too.
3316 Since we've already merged the TEST, THEN and ELSE blocks, we should
3317 have only one remaining edge from our if-then-else diamond. If there
3318 is more than one remaining edge, it must come from elsewhere. There
3319 may be zero incoming edges if the THEN block didn't actually join
3320 back up (as with a call to a non-return function). */
3321 else if (EDGE_COUNT (join_bb->preds) < 2
3322 && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3324 /* We can merge the JOIN cleanly and update the dataflow; try
3325 again on this pass. */
3326 merge_blocks (combo_bb, join_bb);
3327 num_true_changes++;
3329 else
3331 /* We cannot merge the JOIN. */
3333 /* The outgoing edge for the current COMBO block should already
3334 be correct. Verify this. */
3335 gcc_assert (single_succ_p (combo_bb)
3336 && single_succ (combo_bb) == join_bb);
3338 /* Remove the jump and cruft from the end of the COMBO block. */
3339 if (join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3340 tidy_fallthru_edge (single_succ_edge (combo_bb));
3343 num_updated_if_blocks++;
3346 /* Find a block ending in a simple IF condition and try to transform it
3347 in some way. When converting a multi-block condition, put the new code
3348 in the first such block and delete the rest. Return a pointer to this
3349 first block if some transformation was done. Return NULL otherwise. */
3351 static basic_block
3352 find_if_header (basic_block test_bb, int pass)
3354 ce_if_block ce_info;
3355 edge then_edge;
3356 edge else_edge;
3358 /* The kind of block we're looking for has exactly two successors. */
3359 if (EDGE_COUNT (test_bb->succs) != 2)
3360 return NULL;
3362 then_edge = EDGE_SUCC (test_bb, 0);
3363 else_edge = EDGE_SUCC (test_bb, 1);
3365 if (df_get_bb_dirty (then_edge->dest))
3366 return NULL;
3367 if (df_get_bb_dirty (else_edge->dest))
3368 return NULL;
3370 /* Neither edge should be abnormal. */
3371 if ((then_edge->flags & EDGE_COMPLEX)
3372 || (else_edge->flags & EDGE_COMPLEX))
3373 return NULL;
3375 /* Nor exit the loop. */
3376 if ((then_edge->flags & EDGE_LOOP_EXIT)
3377 || (else_edge->flags & EDGE_LOOP_EXIT))
3378 return NULL;
3380 /* The THEN edge is canonically the one that falls through. */
3381 if (then_edge->flags & EDGE_FALLTHRU)
3383 else if (else_edge->flags & EDGE_FALLTHRU)
3384 std::swap (then_edge, else_edge);
3385 else
3386 /* Otherwise this must be a multiway branch of some sort. */
3387 return NULL;
3389 memset (&ce_info, 0, sizeof (ce_info));
3390 ce_info.test_bb = test_bb;
3391 ce_info.then_bb = then_edge->dest;
3392 ce_info.else_bb = else_edge->dest;
3393 ce_info.pass = pass;
3395 #ifdef IFCVT_MACHDEP_INIT
3396 IFCVT_MACHDEP_INIT (&ce_info);
3397 #endif
3399 if (!reload_completed
3400 && noce_find_if_block (test_bb, then_edge, else_edge, pass))
3401 goto success;
3403 if (reload_completed
3404 && targetm.have_conditional_execution ()
3405 && cond_exec_find_if_block (&ce_info))
3406 goto success;
3408 if (targetm.have_trap ()
3409 && optab_handler (ctrap_optab, word_mode) != CODE_FOR_nothing
3410 && find_cond_trap (test_bb, then_edge, else_edge))
3411 goto success;
3413 if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
3414 && (reload_completed || !targetm.have_conditional_execution ()))
3416 if (find_if_case_1 (test_bb, then_edge, else_edge))
3417 goto success;
3418 if (find_if_case_2 (test_bb, then_edge, else_edge))
3419 goto success;
3422 return NULL;
3424 success:
3425 if (dump_file)
3426 fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
3427 /* Set this so we continue looking. */
3428 cond_exec_changed_p = TRUE;
3429 return ce_info.test_bb;
3432 /* Check whether a block has two edges, one of which falls through to the next
3433 block, and the other jumps to a specific block, so that we can tell if the
3434 block is part of an && test or an || test. Return either -1 or the number
3435 of non-note, non-jump, non-USE/CLOBBER insns in the block. */
3437 static int
3438 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
3440 edge cur_edge;
3441 int fallthru_p = FALSE;
3442 int jump_p = FALSE;
3443 rtx_insn *insn;
3444 rtx_insn *end;
3445 int n_insns = 0;
3446 edge_iterator ei;
3448 if (!cur_bb || !target_bb)
3449 return -1;
3451 /* If no edges, obviously it doesn't jump or fallthru. */
3452 if (EDGE_COUNT (cur_bb->succs) == 0)
3453 return -1;
3455 FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
3457 if (cur_edge->flags & EDGE_COMPLEX)
3458 /* Anything complex isn't what we want. */
3459 return -1;
3461 else if (cur_edge->flags & EDGE_FALLTHRU)
3462 fallthru_p = TRUE;
3464 else if (cur_edge->dest == target_bb)
3465 jump_p = TRUE;
3467 else
3468 return -1;
3471 if ((jump_p & fallthru_p) == 0)
3472 return -1;
3474 /* Don't allow calls in the block, since this is used to group && and ||
3475 together for conditional execution support. ??? We should support
3476 conditional execution across calls for IA-64 some day, but
3477 for now it makes the code simpler. */
3478 end = BB_END (cur_bb);
3479 insn = BB_HEAD (cur_bb);
3481 while (insn != NULL_RTX)
3483 if (CALL_P (insn))
3484 return -1;
3486 if (INSN_P (insn)
3487 && !JUMP_P (insn)
3488 && !DEBUG_INSN_P (insn)
3489 && GET_CODE (PATTERN (insn)) != USE
3490 && GET_CODE (PATTERN (insn)) != CLOBBER)
3491 n_insns++;
3493 if (insn == end)
3494 break;
3496 insn = NEXT_INSN (insn);
3499 return n_insns;
3502 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
3503 block. If so, we'll try to convert the insns to not require the branch.
3504 Return TRUE if we were successful at converting the block. */
3506 static int
3507 cond_exec_find_if_block (struct ce_if_block * ce_info)
3509 basic_block test_bb = ce_info->test_bb;
3510 basic_block then_bb = ce_info->then_bb;
3511 basic_block else_bb = ce_info->else_bb;
3512 basic_block join_bb = NULL_BLOCK;
3513 edge cur_edge;
3514 basic_block next;
3515 edge_iterator ei;
3517 ce_info->last_test_bb = test_bb;
3519 /* We should only ever get here after reload,
3520 and only if the target has conditional execution. */
3521 gcc_assert (reload_completed && targetm.have_conditional_execution ());
3523 /* Discover if any fall through predecessors of the current test basic block
3524 were && tests (which jump to the else block) or || tests (which jump to
3525 the then block). */
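/* E.g. (sketch): for "if (a && b) THEN else ELSE" the block testing A
   jumps straight to ELSE when A is false and falls through to the test
   of B otherwise; for "if (a || b)" the A block jumps to THEN instead. */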
3526 if (single_pred_p (test_bb)
3527 && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
3529 basic_block bb = single_pred (test_bb);
3530 basic_block target_bb;
3531 int max_insns = MAX_CONDITIONAL_EXECUTE;
3532 int n_insns;
3534 /* Determine if the preceding block is an && or || block. */
3535 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
3537 ce_info->and_and_p = TRUE;
3538 target_bb = else_bb;
3540 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
3542 ce_info->and_and_p = FALSE;
3543 target_bb = then_bb;
3545 else
3546 target_bb = NULL_BLOCK;
3548 if (target_bb && n_insns <= max_insns)
3550 int total_insns = 0;
3551 int blocks = 0;
3553 ce_info->last_test_bb = test_bb;
3555 /* Found at least one && or || block, look for more. */
3558 ce_info->test_bb = test_bb = bb;
3559 total_insns += n_insns;
3560 blocks++;
3562 if (!single_pred_p (bb))
3563 break;
3565 bb = single_pred (bb);
3566 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
3568 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
3570 ce_info->num_multiple_test_blocks = blocks;
3571 ce_info->num_multiple_test_insns = total_insns;
3573 if (ce_info->and_and_p)
3574 ce_info->num_and_and_blocks = blocks;
3575 else
3576 ce_info->num_or_or_blocks = blocks;
3580 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
3581 other than any || blocks which jump to the THEN block. */
3582 if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
3583 return FALSE;
3585 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
3586 FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
3588 if (cur_edge->flags & EDGE_COMPLEX)
3589 return FALSE;
3592 FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
3594 if (cur_edge->flags & EDGE_COMPLEX)
3595 return FALSE;
3598 /* The THEN block of an IF-THEN combo must have zero or one successors. */
3599 if (EDGE_COUNT (then_bb->succs) > 0
3600 && (!single_succ_p (then_bb)
3601 || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
3602 || (epilogue_completed
3603 && tablejump_p (BB_END (then_bb), NULL, NULL))))
3604 return FALSE;
3606 /* If the THEN block has no successors, conditional execution can still
3607 make a conditional call. Don't do this unless the ELSE block has
3608 only one incoming edge -- the CFG manipulation is too ugly otherwise.
3609 Check for the last insn of the THEN block being an indirect jump, which
3610 is listed as not having any successors, but confuses the rest of the CE
3611 code processing. ??? We should fix this in the future. */
3612 if (EDGE_COUNT (then_bb->succs) == 0)
3614 if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3616 rtx_insn *last_insn = BB_END (then_bb);
3618 while (last_insn
3619 && NOTE_P (last_insn)
3620 && last_insn != BB_HEAD (then_bb))
3621 last_insn = PREV_INSN (last_insn);
3623 if (last_insn
3624 && JUMP_P (last_insn)
3625 && ! simplejump_p (last_insn))
3626 return FALSE;
3628 join_bb = else_bb;
3629 else_bb = NULL_BLOCK;
3631 else
3632 return FALSE;
3635 /* If the THEN block's successor is the other edge out of the TEST block,
3636 then we have an IF-THEN combo without an ELSE. */
3637 else if (single_succ (then_bb) == else_bb)
3639 join_bb = else_bb;
3640 else_bb = NULL_BLOCK;
3643 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
3644 has exactly one predecessor and one successor, and the outgoing edge
3645 is not complex, then we have an IF-THEN-ELSE combo. */
3646 else if (single_succ_p (else_bb)
3647 && single_succ (then_bb) == single_succ (else_bb)
3648 && single_pred_p (else_bb)
3649 && !(single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
3650 && !(epilogue_completed
3651 && tablejump_p (BB_END (else_bb), NULL, NULL)))
3652 join_bb = single_succ (else_bb);
3654 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
3655 else
3656 return FALSE;
3658 num_possible_if_blocks++;
3660 if (dump_file)
3662 fprintf (dump_file,
3663 "\nIF-THEN%s block found, pass %d, start block %d "
3664 "[insn %d], then %d [%d]",
3665 (else_bb) ? "-ELSE" : "",
3666 ce_info->pass,
3667 test_bb->index,
3668 BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
3669 then_bb->index,
3670 BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
3672 if (else_bb)
3673 fprintf (dump_file, ", else %d [%d]",
3674 else_bb->index,
3675 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
3677 fprintf (dump_file, ", join %d [%d]",
3678 join_bb->index,
3679 BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
3681 if (ce_info->num_multiple_test_blocks > 0)
3682 fprintf (dump_file, ", %d %s block%s last test %d [%d]",
3683 ce_info->num_multiple_test_blocks,
3684 (ce_info->and_and_p) ? "&&" : "||",
3685 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
3686 ce_info->last_test_bb->index,
3687 ((BB_HEAD (ce_info->last_test_bb))
3688 ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
3689 : -1));
3691 fputc ('\n', dump_file);
3694 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we get the
3695 first condition for free, since we've already asserted that there's a
3696 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
3697 we checked the FALLTHRU flag, those are already adjacent to the last IF
3698 block. */
3699 /* ??? As an enhancement, move the ELSE block. Have to deal with
3700 BLOCK notes, if by no other means than backing out the merge if they
3701 exist. Sticky enough I don't want to think about it now. */
3702 next = then_bb;
3703 if (else_bb && (next = next->next_bb) != else_bb)
3704 return FALSE;
3705 if ((next = next->next_bb) != join_bb
3706 && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3708 if (else_bb)
3709 join_bb = NULL;
3710 else
3711 return FALSE;
3714 /* Do the real work. */
3716 ce_info->else_bb = else_bb;
3717 ce_info->join_bb = join_bb;
3719 /* If we have && and || tests, try to first handle combining the && and ||
3720 tests into the conditional code, and if that fails, go back and handle
3721 it without the && and ||, which at present handles the && case if there
3722 was no ELSE block. */
3723 if (cond_exec_process_if_block (ce_info, TRUE))
3724 return TRUE;
3726 if (ce_info->num_multiple_test_blocks)
3728 cancel_changes (0);
3730 if (cond_exec_process_if_block (ce_info, FALSE))
3731 return TRUE;
3734 return FALSE;
3737 /* Convert a branch over a trap, or a branch
3738 to a trap, into a conditional trap. */
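/* E.g. (illustrative): a sequence such as

       if (x != 0) goto skip;
       trap;
     skip:

   becomes the single conditional trap "trap_if (x == 0)", and the trap
   block is deleted once it has no remaining predecessors. */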
3740 static int
3741 find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
3743 basic_block then_bb = then_edge->dest;
3744 basic_block else_bb = else_edge->dest;
3745 basic_block other_bb, trap_bb;
3746 rtx_insn *trap, *jump;
3747 rtx cond;
3748 rtx_insn *cond_earliest;
3749 enum rtx_code code;
3751 /* Locate the block with the trap instruction. */
3752 /* ??? While we look for no successors, we really ought to allow
3753 EH successors. Need to fix merge_if_block for that to work. */
3754 if ((trap = block_has_only_trap (then_bb)) != NULL)
3755 trap_bb = then_bb, other_bb = else_bb;
3756 else if ((trap = block_has_only_trap (else_bb)) != NULL)
3757 trap_bb = else_bb, other_bb = then_bb;
3758 else
3759 return FALSE;
3761 if (dump_file)
3763 fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
3764 test_bb->index, trap_bb->index);
3767 /* If this is not a standard conditional jump, we can't parse it. */
3768 jump = BB_END (test_bb);
3769 cond = noce_get_condition (jump, &cond_earliest, false);
3770 if (! cond)
3771 return FALSE;
3773 /* If the conditional jump is more than just a conditional jump, then
3774 we cannot do if-conversion on this block. */
3775 if (! onlyjump_p (jump))
3776 return FALSE;
3778 /* We must be comparing objects whose modes imply the size. */
3779 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
3780 return FALSE;
3782 /* Reverse the comparison code, if necessary. */
3783 code = GET_CODE (cond);
3784 if (then_bb == trap_bb)
3786 code = reversed_comparison_code (cond, jump);
3787 if (code == UNKNOWN)
3788 return FALSE;
3791 /* Attempt to generate the conditional trap. */
3792 rtx_insn *seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
3793 copy_rtx (XEXP (cond, 1)),
3794 TRAP_CODE (PATTERN (trap)));
3795 if (seq == NULL)
3796 return FALSE;
3798 /* Emit the new insns before cond_earliest. */
3799 emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATION (trap));
3801 /* Delete the trap block if possible. */
3802 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
3803 df_set_bb_dirty (test_bb);
3804 df_set_bb_dirty (then_bb);
3805 df_set_bb_dirty (else_bb);
3807 if (EDGE_COUNT (trap_bb->preds) == 0)
3809 delete_basic_block (trap_bb);
3810 num_true_changes++;
3813 /* Wire together the blocks again. */
3814 if (current_ir_type () == IR_RTL_CFGLAYOUT)
3815 single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
3816 else if (trap_bb == then_bb)
3818 rtx lab = JUMP_LABEL (jump);
3819 rtx_insn *seq = targetm.gen_jump (lab);
3820 rtx_jump_insn *newjump = emit_jump_insn_after (seq, jump);
3821 LABEL_NUSES (lab) += 1;
3822 JUMP_LABEL (newjump) = lab;
3823 emit_barrier_after (newjump);
3825 delete_insn (jump);
3827 if (can_merge_blocks_p (test_bb, other_bb))
3829 merge_blocks (test_bb, other_bb);
3830 num_true_changes++;
3833 num_updated_if_blocks++;
3834 return TRUE;
3837 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
3838 return it. */
3840 static rtx_insn *
3841 block_has_only_trap (basic_block bb)
3843 rtx_insn *trap;
3845 /* We're not the exit block. */
3846 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
3847 return NULL;
3849 /* The block must have no successors. */
3850 if (EDGE_COUNT (bb->succs) > 0)
3851 return NULL;
3853 /* The only instruction in the THEN block must be the trap. */
3854 trap = first_active_insn (bb);
3855 if (! (trap == BB_END (bb)
3856 && GET_CODE (PATTERN (trap)) == TRAP_IF
3857 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
3858 return NULL;
3860 return trap;
3863 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
3864 transformable, but not necessarily the other. There need be no
3865 JOIN block.
3867 Return TRUE if we were successful at converting the block.
3869 Cases we'd like to look at:
3871 (1)
3872 if (test) goto over; // x not live
3873 x = a;
3874 goto label;
3875 over:
3877 becomes
3879 x = a;
3880 if (! test) goto label;
3882 (2)
3883 if (test) goto E; // x not live
3884 x = big();
3885 goto L;
3886 E:
3887 x = b;
3888 goto M;
3890 becomes
3892 x = b;
3893 if (test) goto M;
3894 x = big();
3895 goto L;
3897 (3) // This one's really only interesting for targets that can do
3898 // multiway branching, e.g. IA-64 BBB bundles. For other targets
3899 // it results in multiple branches on a cache line, which often
3900 // does not sit well with predictors.
3902 if (test1) goto E; // predicted not taken
3903 x = a;
3904 if (test2) goto F;
3905 ...
3906 E:
3907 x = b;
3908 J:
3910 becomes
3912 x = a;
3913 if (test1) goto E;
3914 if (test2) goto F;
3916 Notes:
3918 (A) Don't do (2) if the branch is predicted against the block we're
3919 eliminating. Do it anyway if we can eliminate a branch; this requires
3920 that the sole successor of the eliminated block postdominate the other
3921 side of the if.
3923 (B) With CE, on (3) we can steal from both sides of the if, creating
3925 if (test1) x = a;
3926 if (!test1) x = b;
3927 if (test1) goto J;
3928 if (test2) goto F;
3929 ...
3930 J:
3932 Again, this is most useful if J postdominates.
3934 (C) CE substitutes for helpful life information.
3936 (D) These heuristics need a lot of work. */
/* Tests for case 1 above.  */

static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block new_bb;
  int then_bb_index, then_prob;
  rtx else_target = NULL_RTX;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && JUMP_P (BB_END (then_bb))
       && CROSSING_JUMP_P (BB_END (then_bb)))
      || (BB_END (test_bb)
          && JUMP_P (BB_END (test_bb))
          && CROSSING_JUMP_P (BB_END (test_bb)))
      || (BB_END (else_bb)
          && JUMP_P (BB_END (else_bb))
          && CROSSING_JUMP_P (BB_END (else_bb))))
    return FALSE;

  /* THEN has one successor.  */
  if (!single_succ_p (then_bb))
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (!single_pred_p (then_bb))
    return FALSE;

  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
             "\nIF-CASE-1 found, start %d, then %d\n",
             test_bb->index, then_bb->index);

  if (then_edge->probability)
    then_prob = REG_BR_PROB_BASE - then_edge->probability;
  else
    then_prob = REG_BR_PROB_BASE / 2;

  /* We're speculating from the THEN path; we want to make sure the cost
     of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (then_bb, then_prob,
        COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
                                    predictable_edge_p (then_edge)))))
    return FALSE;

  if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      rtx_insn *jump = BB_END (else_edge->src);
      gcc_assert (JUMP_P (jump));
      else_target = JUMP_LABEL (jump);
    }

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
                            single_succ_edge (then_bb), 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  /* We can avoid creating a new basic block if then_bb is immediately
     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
     through to else_bb.  */
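  /* For illustration (an exposition aid, not from the original sources),
     the two layouts are roughly:

        test_bb                     test_bb
           | \                         |
           |  then_bb      ==>         |      (then_bb deleted;
           | /                         |       test_bb falls through)
        else_bb                     else_bb

     Only when else_bb does not immediately follow then_bb must a new
     block or a redirected jump carry control to it.  */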
  if (then_bb->next_bb == else_bb
      && then_bb->prev_bb == test_bb
      && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
      new_bb = 0;
    }
  else if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    new_bb = force_nonfallthru_and_redirect (FALLTHRU_EDGE (test_bb),
                                             else_bb, else_target);
  else
    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
                                             else_bb);

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (else_bb);

  then_bb_index = then_bb->index;
  delete_basic_block (then_bb);

  /* Make rest of code believe that the newly created block is the THEN_BB
     block we removed.  */
  if (new_bb)
    {
      df_bb_replace (then_bb_index, new_bb);
      /* This should have been done above via force_nonfallthru_and_redirect
         (possibly called from redirect_edge_and_branch_force).  */
      gcc_checking_assert (BB_PARTITION (new_bb) == BB_PARTITION (test_bb));
    }

  num_true_changes++;
  num_updated_if_blocks++;

  return TRUE;
}
/* Test for case 2 above.  */

static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ;
  int then_prob, else_prob;

  /* We do not want to speculate (empty) loop latches.  */
  if (current_loops
      && else_bb->loop_father->latch == else_bb)
    return FALSE;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && JUMP_P (BB_END (then_bb))
       && CROSSING_JUMP_P (BB_END (then_bb)))
      || (BB_END (test_bb)
          && JUMP_P (BB_END (test_bb))
          && CROSSING_JUMP_P (BB_END (test_bb)))
      || (BB_END (else_bb)
          && JUMP_P (BB_END (else_bb))
          && CROSSING_JUMP_P (BB_END (else_bb))))
    return FALSE;

  /* ELSE has one successor.  */
  if (!single_succ_p (else_bb))
    return FALSE;
  else
    else_succ = single_succ_edge (else_bb);

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (!single_pred_p (else_bb))
    return FALSE;

  /* THEN is not EXIT.  */
  if (then_bb->index < NUM_FIXED_BLOCKS)
    return FALSE;

  if (else_edge->probability)
    {
      else_prob = else_edge->probability;
      then_prob = REG_BR_PROB_BASE - else_prob;
    }
  else
    {
      else_prob = REG_BR_PROB_BASE / 2;
      then_prob = REG_BR_PROB_BASE / 2;
    }

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
  if (else_prob > then_prob)
    ;
  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
           || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
                              else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
             "\nIF-CASE-2 found, start %d, else %d\n",
             test_bb->index, else_bb->index);

  /* We're speculating from the ELSE path; we want to make sure the cost
     of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (else_bb, else_prob,
        COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
                                    predictable_edge_p (else_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  DEST_EDGE is an
   edge representing a jump to the join block; after the conversion,
   TEST_BB should be branching to its destination.
   REVERSEP is true if the sense of the branch should be reversed.  */
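/* Illustrative sketch (an assumption for exposition, not from the
   original sources): given

     test_bb:   if (cond) goto join;
     merge_bb:  x = a;
     join:      ...

   a successful call hoists "x = a" above the branch (or predicates it
   on a conditional-execution target) and points the branch at
   DEST_EDGE's destination, inverting COND first when REVERSEP.  */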
static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
                    basic_block other_bb, edge dest_edge, int reversep)
{
  basic_block new_dest = dest_edge->dest;
  rtx_insn *head, *end, *jump;
  rtx_insn *earliest = NULL;
  rtx old_dest;
  bitmap merge_set = NULL;
  /* Number of pending changes.  */
  int n_validated_changes = 0;
  rtx new_dest_label = NULL_RTX;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  while (DEBUG_INSN_P (end) && end != head)
    end = PREV_INSN (end);

  /* If merge_bb ends with a tablejump, predicating/moving insns
     into test_bb and then deleting merge_bb will result in the jumptable
     that follows merge_bb being removed along with merge_bb and then we
     get an unresolved reference to the jumptable.  */
  if (tablejump_p (end, NULL, NULL))
    return FALSE;

  if (LABEL_P (head))
    head = NEXT_INSN (head);
  while (DEBUG_INSN_P (head) && head != end)
    head = NEXT_INSN (head);
  if (NOTE_P (head))
    {
      if (head == end)
        {
          head = end = NULL;
          goto no_body;
        }
      head = NEXT_INSN (head);
      while (DEBUG_INSN_P (head) && head != end)
        head = NEXT_INSN (head);
    }

  if (JUMP_P (end))
    {
      if (!onlyjump_p (end))
        return FALSE;
      if (head == end)
        {
          head = end = NULL;
          goto no_body;
        }
      end = PREV_INSN (end);
      while (DEBUG_INSN_P (end) && end != head)
        end = PREV_INSN (end);
    }

  /* Don't move frame-related insn across the conditional branch.  This
     can lead to one of the paths of the branch having wrong unwind info.  */
  if (epilogue_completed)
    {
      rtx_insn *insn = head;
      while (1)
        {
          if (INSN_P (insn) && RTX_FRAME_RELATED_P (insn))
            return FALSE;
          if (insn == end)
            break;
          insn = NEXT_INSN (insn);
        }
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (targetm.have_conditional_execution ())
    {
      /* In the conditional execution case, we have things easy.  We know
         the condition is reversible.  We don't have to check life info
         because we're going to conditionally execute the code anyway.
         All that's left is making sure the insns involved can actually
         be predicated.  */
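      /* For example (illustrative only): a move "(set (reg x) (reg a))"
         in MERGE_BB would be wrapped by cond_exec_process_insns below as
         "(cond_exec (ne (reg cc) (const_int 0)) (set (reg x) (reg a)))"
         on a typical conditional-execution target.  */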
      rtx cond;

      cond = cond_exec_get_condition (jump);
      if (! cond)
        return FALSE;

      rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      int prob_val = (note ? XINT (note, 0) : -1);

      if (reversep)
        {
          enum rtx_code rev = reversed_comparison_code (cond, jump);
          if (rev == UNKNOWN)
            return FALSE;
          cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
                                 XEXP (cond, 1));
          if (prob_val >= 0)
            prob_val = REG_BR_PROB_BASE - prob_val;
        }

      if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
          && verify_changes (0))
        n_validated_changes = num_validated_changes ();
      else
        cancel_changes (0);

      earliest = jump;
    }
#endif

  /* If we allocated new pseudos (e.g. in the conditional move
     expander called from noce_emit_cmove), we must resize the
     array first.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Try the NCE path if the CE path did not result in any changes.  */
  if (n_validated_changes == 0)
    {
      rtx cond;
      rtx_insn *insn;
      regset live;
      bool success;

      /* In the non-conditional execution case, we have to verify that there
         are no trapping operations, no calls, no references to memory, and
         that any registers modified are dead at the branch site.  */

      if (!any_condjump_p (jump))
        return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest, false);
      if (!cond)
        return FALSE;

      live = BITMAP_ALLOC (&reg_obstack);
      simulate_backwards_to_point (merge_bb, live, end);
      success = can_move_insns_across (head, end, earliest, jump,
                                       merge_bb, live,
                                       df_get_live_in (other_bb), NULL);
      BITMAP_FREE (live);
      if (!success)
        return FALSE;

      /* Collect the set of registers set in MERGE_BB.  */
      merge_set = BITMAP_ALLOC (&reg_obstack);

      FOR_BB_INSNS (merge_bb, insn)
        if (NONDEBUG_INSN_P (insn))
          df_simulate_find_defs (insn, merge_set);

      /* If shrink-wrapping, disable this optimization when test_bb is
         the first basic block and merge_bb exits.  The idea is to not
         move code setting up a return register as that may clobber a
         register used to pass function parameters, which then must be
         saved in caller-saved regs.  A caller-saved reg requires the
         prologue, killing a shrink-wrap opportunity.  */
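      /* Example (illustrative, not from the original sources): with
         test_bb first and merge_bb reaching the exit block, hoisting a
         set of the return-value register above the branch could clobber
         an incoming parameter register still live on the other path;
         saving that register would require a prologue, defeating the
         shrink-wrap.  */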
      if ((SHRINK_WRAPPING_ENABLED && !epilogue_completed)
          && ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == test_bb
          && single_succ_p (new_dest)
          && single_succ (new_dest) == EXIT_BLOCK_PTR_FOR_FN (cfun)
          && bitmap_intersect_p (df_get_live_in (new_dest), merge_set))
        {
          regset return_regs;
          unsigned int i;

          return_regs = BITMAP_ALLOC (&reg_obstack);

          /* Start off with the intersection of regs used to pass
             params and regs used to return values.  */
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            if (FUNCTION_ARG_REGNO_P (i)
                && targetm.calls.function_value_regno_p (i))
              bitmap_set_bit (return_regs, INCOMING_REGNO (i));

          bitmap_and_into (return_regs,
                           df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          bitmap_and_into (return_regs,
                           df_get_live_in (EXIT_BLOCK_PTR_FOR_FN (cfun)));
          if (!bitmap_empty_p (return_regs))
            {
              FOR_BB_INSNS_REVERSE (new_dest, insn)
                if (NONDEBUG_INSN_P (insn))
                  {
                    df_ref def;

                    /* If this insn sets any reg in return_regs, add all
                       reg uses to the set of regs we're interested in.  */
                    FOR_EACH_INSN_DEF (def, insn)
                      if (bitmap_bit_p (return_regs, DF_REF_REGNO (def)))
                        {
                          df_simulate_uses (insn, return_regs);
                          break;
                        }
                  }
              if (bitmap_intersect_p (merge_set, return_regs))
                {
                  BITMAP_FREE (return_regs);
                  BITMAP_FREE (merge_set);
                  return FALSE;
                }
            }
          BITMAP_FREE (return_regs);
        }
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      if (!any_condjump_p (jump))
        goto cancel;

      if (JUMP_P (BB_END (dest_edge->src)))
        new_dest_label = JUMP_LABEL (BB_END (dest_edge->src));
      else if (new_dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
        new_dest_label = ret_rtx;
      else
        new_dest_label = block_label (new_dest);

      rtx_jump_insn *jump_insn = as_a <rtx_jump_insn *> (jump);
      if (reversep
          ? ! invert_jump_1 (jump_insn, new_dest_label)
          : ! redirect_jump_1 (jump_insn, new_dest_label))
        goto cancel;
    }

  if (verify_changes (n_validated_changes))
    confirm_change_group ();
  else
    goto cancel;

  if (other_bb != new_dest)
    {
      redirect_jump_2 (as_a <rtx_jump_insn *> (jump), old_dest, new_dest_label,
                       0, reversep);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
        {
          std::swap (BRANCH_EDGE (test_bb)->count,
                     FALLTHRU_EDGE (test_bb)->count);
          std::swap (BRANCH_EDGE (test_bb)->probability,
                     FALLTHRU_EDGE (test_bb)->probability);
          update_br_prob_note (test_bb);
        }
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      rtx_insn *insn;

      if (end == BB_END (merge_bb))
        BB_END (merge_bb) = PREV_INSN (head);

      /* PR 21767: when moving insns above a conditional branch, the REG_EQUAL
         notes being moved might become invalid.  */
      insn = head;
      do
        {
          rtx note;

          if (! INSN_P (insn))
            continue;
          note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
          if (! note)
            continue;
          remove_note (insn, note);
        } while (insn != end && (insn = NEXT_INSN (insn)));
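      /* E.g. (illustrative): a REG_EQUAL note records a value that held
         only on the path through MERGE_BB; once the insn executes
         unconditionally ahead of the branch, the noted equality may no
         longer hold, so the loop above drops such notes.  */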
      /* PR46315: when moving insns above a conditional branch, the REG_EQUAL
         notes referring to the registers being set might become invalid.  */
      if (merge_set)
        {
          unsigned i;
          bitmap_iterator bi;

          EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
            remove_reg_equal_equiv_notes_for_regno (i);

          BITMAP_FREE (merge_set);
        }

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
         At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  cancel_changes (0);

  if (merge_set)
    BITMAP_FREE (merge_set);

  return FALSE;
}
/* Main entry point for all if-conversion.  AFTER_COMBINE is true if
   we are after the combine pass.  */

static void
if_convert (bool after_combine)
{
  basic_block bb;
  int pass;

  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  /* Record whether we are after the combine pass.  */
  ifcvt_after_combine = after_combine;
  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;

  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  mark_loop_exit_edges ();
  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);

  /* Compute postdominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
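  /* Illustrative sketch (an exposition aid): given nested tests such as

       if (a) { if (b) x = 1; }

     a first pass may predicate or convert the inner IF, leaving a
     simple IF-THEN that only a subsequent pass can convert; hence the
     do-while loop below repeats for as long as changes are made.  */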
  pass = 0;
  do
    {
      df_analyze ();
      /* Only need to do dce on the first pass.  */
      df_clear_flags (DF_LR_RUN_DCE);
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && pass > 1)
        fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB_FN (bb, cfun)
        {
          basic_block new_bb;
          while (!df_get_bb_dirty (bb)
                 && (new_bb = find_if_header (bb, pass)) != NULL)
            bb = new_bb;
        }

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && cond_exec_changed_p)
        print_rtl_with_bb (dump_file, get_insns (), dump_flags);
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  if (dump_file)
    fflush (dump_file);

  clear_aux_for_blocks ();

  /* If we allocated new pseudos, we must resize the array for sched1.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Write the final stats.  */
  if (dump_file && num_possible_if_blocks > 0)
    {
      fprintf (dump_file,
               "\n%d possible IF blocks searched.\n",
               num_possible_if_blocks);
      fprintf (dump_file,
               "%d IF blocks converted.\n",
               num_updated_if_blocks);
      fprintf (dump_file,
               "%d true changes made.\n\n\n",
               num_true_changes);
    }

  if (optimize == 1)
    df_remove_problem (df_live);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* If-conversion and CFG cleanup.  */
static unsigned int
rest_of_handle_if_conversion (void)
{
  if (flag_if_conversion)
    {
      if (dump_file)
        {
          dump_reg_info (dump_file);
          dump_flow_info (dump_file, dump_flags);
        }
      cleanup_cfg (CLEANUP_EXPENSIVE);
      if_convert (false);
    }

  cleanup_cfg (0);
  return 0;
}
namespace {

const pass_data pass_data_rtl_ifcvt =
{
  RTL_PASS, /* type */
  "ce1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_ifcvt : public rtl_opt_pass
{
public:
  pass_rtl_ifcvt (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_ifcvt, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (optimize > 0) && dbg_cnt (if_conversion);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_if_conversion ();
    }

}; // class pass_rtl_ifcvt

} // anon namespace

rtl_opt_pass *
make_pass_rtl_ifcvt (gcc::context *ctxt)
{
  return new pass_rtl_ifcvt (ctxt);
}
/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  */

namespace {

const pass_data pass_data_if_after_combine =
{
  RTL_PASS, /* type */
  "ce2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_if_after_combine : public rtl_opt_pass
{
public:
  pass_if_after_combine (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_combine, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion
        && dbg_cnt (if_after_combine);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_combine

} // anon namespace

rtl_opt_pass *
make_pass_if_after_combine (gcc::context *ctxt)
{
  return new pass_if_after_combine (ctxt);
}
namespace {

const pass_data pass_data_if_after_reload =
{
  RTL_PASS, /* type */
  "ce3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_if_after_reload : public rtl_opt_pass
{
public:
  pass_if_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion2
        && dbg_cnt (if_after_reload);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_if_after_reload (gcc::context *ctxt)
{
  return new pass_if_after_reload (ctxt);
}