/* If-conversion support.
   Copyright (C) 2000-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "cfghooks.h"
#include "tree.h"
#include "rtl.h"
#include "df.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "except.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "alias.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "output.h"
#include "insn-codes.h"
#include "optabs.h"
#include "diagnostic-core.h"
#include "tm_p.h"
#include "cfgloop.h"
#include "target.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "shrink-wrap.h"
#include "ifcvt.h"
#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif

#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
   + 1)
#endif
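
/* For example, on a target whose BRANCH_COST is 3 when optimizing for
   speed, MAX_CONDITIONAL_EXECUTE evaluates to 4: up to four insns per
   arm may be converted to conditional execution before keeping the
   branch is considered cheaper.  */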

#ifndef HAVE_cbranchcc4
#define HAVE_cbranchcc4 0
#endif

#define IFCVT_MULTIPLE_DUMPS 1

#define NULL_BLOCK	((basic_block) NULL)

/* True if after combine pass.  */
static bool ifcvt_after_combine;

/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;

/* Forward references.  */
static int count_bb_insns (const_basic_block);
static bool cheap_bb_rtx_cost_p (const_basic_block, int, int);
static rtx_insn *first_active_insn (basic_block);
static rtx_insn *last_active_insn (basic_block, int);
static rtx_insn *find_active_insn_before (basic_block, rtx_insn *);
static rtx_insn *find_active_insn_after (basic_block, rtx_insn *);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block *, rtx_insn *, rtx, rtx, int,
                                    int);
static rtx cond_exec_get_condition (rtx_insn *);
static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
static int noce_operand_ok (const_rtx);
static void merge_if_block (ce_if_block *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
static int block_jumps_and_fallthru_p (basic_block, basic_block);
static int noce_find_if_block (basic_block, edge, edge, int);
static int cond_exec_find_if_block (ce_if_block *);
static int find_if_case_1 (basic_block, edge, edge);
static int find_if_case_2 (basic_block, edge, edge);
static int dead_or_predicable (basic_block, basic_block, basic_block,
                               edge, int);
static void noce_emit_move_insn (rtx, rtx);
static rtx_insn *block_has_only_trap (basic_block);

/* Count the number of non-jump active insns in BB.  */

static int
count_bb_insns (const_basic_block bb)
{
  int count = 0;
  rtx_insn *insn = BB_HEAD (bb);

  while (1)
    {
      if (active_insn_p (insn) && !JUMP_P (insn))
        count++;

      if (insn == BB_END (bb))
        break;
      insn = NEXT_INSN (insn);
    }

  return count;
}

/* Determine whether the total insn_rtx_cost on non-jump insns in
   basic block BB is less than MAX_COST.  This function returns
   false if the cost of any instruction could not be estimated.

   The cost of the non-jump insns in BB is scaled by REG_BR_PROB_BASE
   as those insns are being speculated.  MAX_COST is scaled with SCALE
   plus a small fudge factor.  */

static bool
cheap_bb_rtx_cost_p (const_basic_block bb, int scale, int max_cost)
{
  int count = 0;
  rtx_insn *insn = BB_HEAD (bb);
  bool speed = optimize_bb_for_speed_p (bb);

  /* Set scale to REG_BR_PROB_BASE to cancel out the identical scaling
     applied to insn_rtx_cost when optimizing for size.  Only do
     this after combine because if-conversion might interfere with
     passes before combine.

     Use optimize_function_for_speed_p instead of the pre-defined
     variable speed to make sure it is set to the same value for all
     basic blocks in one if-conversion transformation.  */
  if (!optimize_function_for_speed_p (cfun) && ifcvt_after_combine)
    scale = REG_BR_PROB_BASE;
  /* Our branch probability/scaling factors are just estimates and don't
     account for cases where we can get speculation for free and other
     secondary benefits.  So we fudge the scale factor to make speculating
     appear a little more profitable when optimizing for performance.  */
  else
    scale += REG_BR_PROB_BASE / 8;

  max_cost *= scale;
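
  /* At this point the comparison in the loop below is effectively
     sum (insn_rtx_cost (insn)) * REG_BR_PROB_BASE  vs  MAX_COST * SCALE.
     Assuming SCALE is a REG_BR_PROB_BASE-relative branch probability,
     e.g. 5000 out of REG_BR_PROB_BASE == 10000 for a 50% taken branch,
     the block is considered cheap while its summed insn_rtx_cost stays
     below half of MAX_COST (plus the fudge factor added above).  */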

  while (1)
    {
      if (NONJUMP_INSN_P (insn))
        {
          int cost = insn_rtx_cost (PATTERN (insn), speed) * REG_BR_PROB_BASE;
          if (cost == 0)
            return false;

          /* If this instruction is the load or set of a "stack" register,
             such as a floating point register on x87, then the cost of
             speculatively executing this insn may need to include
             the additional cost of popping its result off of the
             register stack.  Unfortunately, correctly recognizing and
             accounting for this additional overhead is tricky, so for
             now we simply prohibit such speculative execution.  */
#ifdef STACK_REGS
          {
            rtx set = single_set (insn);
            if (set && STACK_REG_P (SET_DEST (set)))
              return false;
          }
#endif

          count += cost;
          if (count >= max_cost)
            return false;
        }
      else if (CALL_P (insn))
        return false;

      if (insn == BB_END (bb))
        break;
      insn = NEXT_INSN (insn);
    }

  return true;
}

/* Return the first non-jump active insn in the basic block.  */

static rtx_insn *
first_active_insn (basic_block bb)
{
  rtx_insn *insn = BB_HEAD (bb);

  if (LABEL_P (insn))
    {
      if (insn == BB_END (bb))
        return NULL;
      insn = NEXT_INSN (insn);
    }

  while (NOTE_P (insn) || DEBUG_INSN_P (insn))
    {
      if (insn == BB_END (bb))
        return NULL;
      insn = NEXT_INSN (insn);
    }

  if (JUMP_P (insn))
    return NULL;

  return insn;
}

/* Return the last non-jump active insn in the basic block.  */

static rtx_insn *
last_active_insn (basic_block bb, int skip_use_p)
{
  rtx_insn *insn = BB_END (bb);
  rtx_insn *head = BB_HEAD (bb);

  while (NOTE_P (insn)
         || JUMP_P (insn)
         || DEBUG_INSN_P (insn)
         || (skip_use_p
             && NONJUMP_INSN_P (insn)
             && GET_CODE (PATTERN (insn)) == USE))
    {
      if (insn == head)
        return NULL;
      insn = PREV_INSN (insn);
    }

  if (LABEL_P (insn))
    return NULL;

  return insn;
}

/* Return the active insn before INSN inside basic block CURR_BB.  */

static rtx_insn *
find_active_insn_before (basic_block curr_bb, rtx_insn *insn)
{
  if (!insn || insn == BB_HEAD (curr_bb))
    return NULL;

  while ((insn = PREV_INSN (insn)) != NULL_RTX)
    {
      if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
        break;

      /* No other active insn all the way to the start of the basic block.  */
      if (insn == BB_HEAD (curr_bb))
        return NULL;
    }

  return insn;
}

/* Return the active insn after INSN inside basic block CURR_BB.  */

static rtx_insn *
find_active_insn_after (basic_block curr_bb, rtx_insn *insn)
{
  if (!insn || insn == BB_END (curr_bb))
    return NULL;

  while ((insn = NEXT_INSN (insn)) != NULL_RTX)
    {
      if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
        break;

      /* No other active insn all the way to the end of the basic block.  */
      if (insn == BB_END (curr_bb))
        return NULL;
    }

  return insn;
}

/* Return the basic block reached by falling through the basic block BB.  */

static basic_block
block_fallthru (basic_block bb)
{
  edge e = find_fallthru_edge (bb->succs);

  return (e) ? e->dest : NULL_BLOCK;
}

/* Return true if RTXs A and B can be safely interchanged.  */

static bool
rtx_interchangeable_p (const_rtx a, const_rtx b)
{
  if (!rtx_equal_p (a, b))
    return false;

  if (GET_CODE (a) != MEM)
    return true;

  /* A dead type-unsafe memory reference is legal, but a live type-unsafe
     memory reference is not.  Interchanging a dead type-unsafe memory
     reference with a live type-safe one creates a live type-unsafe memory
     reference, in other words, it makes the program illegal.
     We check here conservatively whether the two memory references have
     equal memory attributes.  */

  return mem_attrs_eq_p (get_mem_attrs (a), get_mem_attrs (b));
}

/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.  */

static int
cond_exec_process_insns (ce_if_block *ce_info ATTRIBUTE_UNUSED,
                         /* if block information */rtx_insn *start,
                         /* first insn to look at */rtx end,
                         /* last insn to look at */rtx test,
                         /* conditional execution test */int prob_val,
                         /* probability of branch taken. */int mod_ok)
{
  int must_be_last = FALSE;
  rtx_insn *insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* dwarf2out can't cope with conditional prologues.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
        return FALSE;

      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
        goto insn_done;

      gcc_assert (NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* dwarf2out can't cope with conditional unwind info.  */
      if (RTX_FRAME_RELATED_P (insn))
        return FALSE;

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
        {
          /* ??? Ug.  Actually unlinking the thing is problematic,
             given what we'd have to coordinate with our callers.  */
          SET_INSN_DELETED (insn);
          goto insn_done;
        }

      /* Last insn wasn't last?  */
      if (must_be_last)
        return FALSE;

      if (modified_in_p (test, insn))
        {
          if (!mod_ok)
            return FALSE;
          must_be_last = TRUE;
        }

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
        {
          if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
            return FALSE;

          xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
                               COND_EXEC_TEST (pattern));
          pattern = COND_EXEC_CODE (pattern);
        }

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
        return FALSE;
#endif

      validate_change (insn, &PATTERN (insn), pattern, 1);

      if (CALL_P (insn) && prob_val >= 0)
        validate_change (insn, &REG_NOTES (insn),
                         gen_rtx_INT_LIST ((machine_mode) REG_BR_PROB,
                                           prob_val, REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
        break;
    }

  return TRUE;
}
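
/* For example, a plain move

     (set (reg:SI 100) (reg:SI 101))

   predicated on TEST = (ne (reg:CC cc) (const_int 0)) is rewritten as

     (cond_exec (ne (reg:CC cc) (const_int 0))
                (set (reg:SI 100) (reg:SI 101)))

   which targets with conditional execution, such as ARM, can match
   directly.  */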

/* Return the condition for a jump.  Do not do any special processing.  */

static rtx
cond_exec_get_condition (rtx_insn *jump)
{
  rtx test_if, cond;

  if (any_condjump_p (jump))
    test_if = SET_SRC (pc_set (jump));
  else
    return NULL_RTX;
  cond = XEXP (test_if, 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
      && LABEL_REF_LABEL (XEXP (test_if, 2)) == JUMP_LABEL (jump))
    {
      enum rtx_code rev = reversed_comparison_code (cond, jump);
      if (rev == UNKNOWN)
        return NULL_RTX;

      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
                             XEXP (cond, 1));
    }

  return cond;
}

/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_exec_process_if_block (ce_if_block * ce_info,
                            /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;       /* last test block */
  basic_block then_bb = ce_info->then_bb;       /* THEN */
  basic_block else_bb = ce_info->else_bb;       /* ELSE or NULL */
  rtx test_expr;                /* expression in IF_THEN_ELSE that is tested */
  rtx_insn *then_start;         /* first insn in THEN block */
  rtx_insn *then_end;           /* last insn + 1 in THEN block */
  rtx_insn *else_start = NULL;  /* first insn in ELSE block or NULL */
  rtx_insn *else_end = NULL;    /* last insn + 1 in ELSE block */
  int max;                      /* max # of insns to convert.  */
  int then_mod_ok;              /* whether conditional mods are ok in THEN */
  rtx true_expr;                /* test for else block insns */
  rtx false_expr;               /* test for then block insns */
  int true_prob_val;            /* probability of else block */
  int false_prob_val;           /* probability of then block */
  rtx_insn *then_last_head = NULL;      /* Last match at the head of THEN */
  rtx_insn *else_last_head = NULL;      /* Last match at the head of ELSE */
  rtx_insn *then_first_tail = NULL;     /* First match at the tail of THEN */
  rtx_insn *else_first_tail = NULL;     /* First match at the tail of ELSE */
  int then_n_insns, else_n_insns, n_insns;
  enum rtx_code false_code;
  rtx note;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
        return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  then_n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  n_insns = then_n_insns;
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      int n_matching;

      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      else_n_insns = ce_info->num_else_insns = count_bb_insns (else_bb);
      n_insns += else_n_insns;

      /* Look for matching sequences at the head and tail of the two blocks,
         and limit the range of insns to be converted if possible.  */
      n_matching = flow_find_cross_jump (then_bb, else_bb,
                                         &then_first_tail, &else_first_tail,
                                         NULL);
      if (then_first_tail == BB_HEAD (then_bb))
        then_start = then_end = NULL;
      if (else_first_tail == BB_HEAD (else_bb))
        else_start = else_end = NULL;

      if (n_matching > 0)
        {
          if (then_end)
            then_end = find_active_insn_before (then_bb, then_first_tail);
          if (else_end)
            else_end = find_active_insn_before (else_bb, else_first_tail);
          n_insns -= 2 * n_matching;
        }

      if (then_start
          && else_start
          && then_n_insns > n_matching
          && else_n_insns > n_matching)
        {
          int longest_match = MIN (then_n_insns - n_matching,
                                   else_n_insns - n_matching);
          n_matching
            = flow_find_head_matching_sequence (then_bb, else_bb,
                                                &then_last_head,
                                                &else_last_head,
                                                longest_match);

          if (n_matching > 0)
            {
              rtx_insn *insn;

              /* We won't pass the insns in the head sequence to
                 cond_exec_process_insns, so we need to test them here
                 to make sure that they don't clobber the condition.  */
              for (insn = BB_HEAD (then_bb);
                   insn != NEXT_INSN (then_last_head);
                   insn = NEXT_INSN (insn))
                if (!LABEL_P (insn) && !NOTE_P (insn)
                    && !DEBUG_INSN_P (insn)
                    && modified_in_p (test_expr, insn))
                  return FALSE;
            }

          if (then_last_head == then_end)
            then_start = then_end = NULL;
          if (else_last_head == else_end)
            else_start = else_end = NULL;

          if (n_matching > 0)
            {
              if (then_start)
                then_start = find_active_insn_after (then_bb, then_last_head);
              if (else_start)
                else_start = find_active_insn_after (else_bb, else_last_head);
              n_insns -= 2 * n_matching;
            }
        }
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
                                 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note)
    {
      true_prob_val = XINT (note, 0);
      false_prob_val = REG_BR_PROB_BASE - true_prob_val;
    }
  else
    {
      true_prob_val = -1;
      false_prob_val = -1;
    }

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
        goto fail;

      do
        {
          rtx_insn *start, *end;
          rtx t, f;
          enum rtx_code f_code;

          bb = block_fallthru (bb);
          start = first_active_insn (bb);
          end = last_active_insn (bb, TRUE);
          if (start
              && ! cond_exec_process_insns (ce_info, start, end, false_expr,
                                            false_prob_val, FALSE))
            goto fail;

          /* If the conditional jump is more than just a conditional jump, then
             we can not do conditional execution conversion on this block.  */
          if (! onlyjump_p (BB_END (bb)))
            goto fail;

          /* Find the conditional jump and isolate the test.  */
          t = cond_exec_get_condition (BB_END (bb));
          if (! t)
            goto fail;

          f_code = reversed_comparison_code (t, BB_END (bb));
          if (f_code == UNKNOWN)
            goto fail;

          f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
          if (ce_info->and_and_p)
            {
              t = gen_rtx_AND (GET_MODE (t), true_expr, t);
              f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
            }
          else
            {
              t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
              f = gen_rtx_AND (GET_MODE (t), false_expr, f);
            }

          /* If the machine description needs to modify the tests, such as
             setting a conditional execution register from a comparison, it can
             do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
          IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

          /* See if the conversion failed.  */
          if (!t || !f)
            goto fail;
#endif

          true_expr = t;
          false_expr = f;
        }
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
          || ! cond_exec_process_insns (ce_info, then_start, then_end,
                                        false_expr, false_prob_val,
                                        then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
                                    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
             n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  If we had matching sequences, make sure to delete one
     copy at the appropriate location first: delete the copy in the THEN branch
     for a tail sequence so that the remaining one is executed last for both
     branches, and delete the copy in the ELSE branch for a head sequence so
     that the remaining one is executed first for both branches.  */
  if (then_first_tail)
    {
      rtx_insn *from = then_first_tail;
      if (!INSN_P (from))
        from = find_active_insn_after (then_bb, from);
      delete_insn_chain (from, BB_END (then_bb), false);
    }
  if (else_last_head)
    delete_insn_chain (first_active_insn (else_bb), else_last_head, false);

  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
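
/* As a source-level sketch of the whole transformation (details vary by
   target; this assumes a conditional-execution target such as ARM):
   for "if (test) x = a; else x = b;" both arms are emitted
   unconditionally in the merged block, each insn wrapped in a COND_EXEC
   on TEST or its reverse, and the conditional branch is removed.  Any
   insns the two arms share at their head or tail are kept as a single
   unpredicated copy, as arranged above.  */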

/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block.  */
  basic_block test_bb, then_bb, else_bb, join_bb;

  /* The jump that ends TEST_BB.  */
  rtx_insn *jump;

  /* The jump condition.  */
  rtx cond;

  /* New insns should be inserted before this one.  */
  rtx_insn *cond_earliest;

  /* Insns in the THEN and ELSE block.  There is always just one insn
     in each of those blocks.  The insns are single_set insns.
     If there was no ELSE block, INSN_B is the last insn before
     COND_EARLIEST, or NULL_RTX.  In the former case, the insn
     operands are still valid, as if INSN_B was moved down below
     the jump.  */
  rtx_insn *insn_a, *insn_b;

  /* The SET_SRC of INSN_A and INSN_B.  */
  rtx a, b;

  /* The SET_DEST of INSN_A.  */
  rtx x;

  /* True if this if block is not canonical.  In the canonical form of
     if blocks, the THEN_BB is the block reached via the fallthru edge
     from TEST_BB.  For the noce transformations, we allow the symmetric
     form as well.  */
  bool then_else_reversed;

  /* Estimated cost of the particular branch instruction.  */
  int branch_cost;
};

static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
static int noce_try_move (struct noce_if_info *);
static int noce_try_store_flag (struct noce_if_info *);
static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
                            rtx, rtx, rtx);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx_insn **);
static int noce_try_minmax (struct noce_if_info *);
static int noce_try_abs (struct noce_if_info *);
static int noce_try_sign_mask (struct noce_if_info *);

/* Helper function for noce_try_store_flag*.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
                      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
                  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    {
      rtx set = pc_set (if_info->jump);
      cond = XEXP (SET_SRC (set), 0);
      if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
          && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump))
        reversep = !reversep;
      if (if_info->then_else_reversed)
        reversep = !reversep;
    }

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx src = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
                                XEXP (cond, 1));
      rtx set = gen_rtx_SET (x, src);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      if (recog_memoized (insn) >= 0)
        {
          rtx_insn *seq = get_insns ();
          end_sequence ();
          emit_insn (seq);

          if_info->cond_earliest = if_info->jump;

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  return emit_store_flag (x, code, XEXP (cond, 0),
                          XEXP (cond, 1), VOIDmode,
                          (code == LTU || code == LEU
                           || code == GEU || code == GTU), normalize);
}
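
/* A store-flag insn materializes a comparison as a value: with
   NORMALIZE == 0 it produces X = (op0 <code> op1) ? STORE_FLAG_VALUE : 0,
   so on a target where STORE_FLAG_VALUE is 1, "x = (a < b)" yields 1
   when the comparison holds and 0 otherwise.  A NORMALIZE of 1 or -1
   asks emit_store_flag to normalize the true result to that value
   instead.  */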

/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  machine_mode outmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      rtx_insn *seq, *insn;
      rtx target;
      optab ot;

      start_sequence ();
      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
         otherwise construct a suitable SET pattern ourselves.  */
      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
             ? emit_move_insn (x, y)
             : emit_insn (gen_rtx_SET (x, y));
      seq = get_insns ();
      end_sequence ();

      if (recog_memoized (insn) <= 0)
        {
          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              rtx op = XEXP (x, 0);
              unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
              unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));

              /* store_bit_field expects START to be relative to
                 BYTES_BIG_ENDIAN and adjusts this value for machines with
                 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
                 invoke store_bit_field again it is necessary to have the START
                 value from the first call.  */
              if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
                {
                  if (MEM_P (op))
                    start = BITS_PER_UNIT - start - size;
                  else
                    {
                      gcc_assert (REG_P (op));
                      start = BITS_PER_WORD - start - size;
                    }
                }

              gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
              store_bit_field (op, size, start, 0, 0, GET_MODE (x), y);
              return;
            }

          switch (GET_RTX_CLASS (GET_CODE (y)))
            {
            case RTX_UNARY:
              ot = code_to_optab (GET_CODE (y));
              if (ot)
                {
                  start_sequence ();
                  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                        emit_move_insn (x, target);
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            case RTX_BIN_ARITH:
            case RTX_COMM_ARITH:
              ot = code_to_optab (GET_CODE (y));
              if (ot)
                {
                  start_sequence ();
                  target = expand_binop (GET_MODE (y), ot,
                                         XEXP (y, 0), XEXP (y, 1),
                                         x, 0, OPTAB_DIRECT);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                        emit_move_insn (x, target);
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            default:
              break;
            }
        }

      emit_insn (seq);
      return;
    }

  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos,
                   0, 0, outmode, y);
}

/* Return the CC reg if it is used in COND.  */

static rtx
cc_in_cond (rtx cond)
{
  if (HAVE_cbranchcc4 && cond
      && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_CC)
    return XEXP (cond, 0);

  return NULL_RTX;
}

/* Return the sequence of instructions generated by if conversion.  This
   function calls end_sequence () to end the current stream, and ensures
   that the instructions are unshared, recognizable non-jump insns.
   On failure, this function returns NULL.  */

static rtx_insn *
end_ifcvt_sequence (struct noce_if_info *if_info)
{
  rtx_insn *insn;
  rtx_insn *seq = get_insns ();
  rtx cc = cc_in_cond (if_info->cond);

  set_used_flags (if_info->x);
  set_used_flags (if_info->cond);
  set_used_flags (if_info->a);
  set_used_flags (if_info->b);
  unshare_all_rtl_in_chain (seq);
  end_sequence ();

  /* Make sure that all of the instructions emitted are recognizable,
     and that we haven't introduced a new jump instruction.
     As an exercise for the reader, build a general mechanism that
     allows proper placement of required clobbers.  */
  for (insn = seq; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
        || recog_memoized (insn) == -1
        /* Make sure new generated code does not clobber CC.  */
        || (cc && set_of (cc, insn)))
      return NULL;

  return seq;
}

/* Convert "if (a != b) x = a; else x = b" into "x = a" and
   "if (a == b) x = a; else x = b" into "x = b".  */

static int
noce_try_move (struct noce_if_info *if_info)
{
  rtx cond = if_info->cond;
  enum rtx_code code = GET_CODE (cond);
  rtx y;
  rtx_insn *seq;

  if (code != NE && code != EQ)
    return FALSE;

  /* This optimization isn't valid if either A or B could be a NaN
     or a signed zero.  */
  if (HONOR_NANS (if_info->x)
      || HONOR_SIGNED_ZEROS (if_info->x))
    return FALSE;

  /* Check whether the operands of the comparison are A and B,
     in either order.  */
  if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
       && rtx_equal_p (if_info->b, XEXP (cond, 1)))
      || (rtx_equal_p (if_info->a, XEXP (cond, 1))
          && rtx_equal_p (if_info->b, XEXP (cond, 0))))
    {
      if (!rtx_interchangeable_p (if_info->a, if_info->b))
        return FALSE;

      y = (code == EQ) ? if_info->a : if_info->b;

      /* Avoid generating the move if the source is the destination.  */
      if (! rtx_equal_p (if_info->x, y))
        {
          start_sequence ();
          noce_emit_move_insn (if_info->x, y);
          seq = end_ifcvt_sequence (if_info);
          if (!seq)
            return FALSE;

          emit_insn_before_setloc (seq, if_info->jump,
                                   INSN_LOCATION (if_info->insn_a));
        }
      return TRUE;
    }
  return FALSE;
}

/* Convert "if (test) x = 1; else x = 0".

   Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
   tried in noce_try_store_flag_constants after noce_try_cmove has had
   a go at the conversion.  */

static int
noce_try_store_flag (struct noce_if_info *if_info)
{
  int reversep;
  rtx target;
  rtx_insn *seq;

  if (CONST_INT_P (if_info->b)
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
      && if_info->a == const0_rtx)
    reversep = 0;
  else if (if_info->b == const0_rtx
           && CONST_INT_P (if_info->a)
           && INTVAL (if_info->a) == STORE_FLAG_VALUE
           && (reversed_comparison_code (if_info->cond, if_info->jump)
               != UNKNOWN))
    reversep = 1;
  else
    return FALSE;

  start_sequence ();

  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
  if (target)
    {
      if (target != if_info->x)
        noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (! seq)
        return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
                               INSN_LOCATION (if_info->insn_a));
      return TRUE;
    }
  else
    {
      end_sequence ();
      return FALSE;
    }
}

/* Convert "if (test) x = a; else x = b", for A and B constant.
   Also allow A = y + c1, B = y + c2, with a common y between A
   and B.  */

static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  bool reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize;
  bool can_reverse;
  machine_mode mode = GET_MODE (if_info->x);
  rtx common = NULL_RTX;

  rtx a = if_info->a;
  rtx b = if_info->b;

  /* Handle cases like x := test ? y + 3 : y + 4.  */
  if (GET_CODE (a) == PLUS
      && GET_CODE (b) == PLUS
      && CONST_INT_P (XEXP (a, 1))
      && CONST_INT_P (XEXP (b, 1))
      && rtx_equal_p (XEXP (a, 0), XEXP (b, 0))
      && noce_operand_ok (XEXP (a, 0))
      && if_info->branch_cost >= 2)
    {
      common = XEXP (a, 0);
      a = XEXP (a, 1);
      b = XEXP (b, 1);
    }

  if (CONST_INT_P (a)
      && CONST_INT_P (b))
    {
      ifalse = INTVAL (a);
      itrue = INTVAL (b);
      bool subtract_flag_p = false;

      diff = (unsigned HOST_WIDE_INT) itrue - ifalse;
      /* Make sure we can represent the difference between the two values.  */
      if ((diff > 0)
          != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
        return FALSE;

      diff = trunc_int_for_mode (diff, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
                     != UNKNOWN);

      reversep = false;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
        {
          normalize = 0;
          /* We could collapse these cases but it is easier to follow the
             diff/STORE_FLAG_VALUE combinations when they are listed
             explicitly.  */

          /* test ? 3 : 4
             => 4 + (test != 0).  */
          if (diff < 0 && STORE_FLAG_VALUE < 0)
            reversep = false;
          /* test ? 4 : 3
             => can_reverse  | 4 + (test == 0)
                !can_reverse | 3 - (test != 0).  */
          else if (diff > 0 && STORE_FLAG_VALUE < 0)
            {
              reversep = can_reverse;
              subtract_flag_p = !can_reverse;
              /* If we need to subtract the flag and we have PLUS-immediate
                 A and B then it is unlikely to be beneficial to play tricks
                 here.  */
              if (subtract_flag_p && common)
                return FALSE;
            }
          /* test ? 3 : 4
             => can_reverse  | 3 + (test == 0)
                !can_reverse | 4 - (test != 0).  */
          else if (diff < 0 && STORE_FLAG_VALUE > 0)
            {
              reversep = can_reverse;
              subtract_flag_p = !can_reverse;
              /* If we need to subtract the flag and we have PLUS-immediate
                 A and B then it is unlikely to be beneficial to play tricks
                 here.  */
              if (subtract_flag_p && common)
                return FALSE;
            }
          /* test ? 4 : 3
             => 4 + (test != 0).  */
          else if (diff > 0 && STORE_FLAG_VALUE > 0)
            reversep = false;
          else
            gcc_unreachable ();
        }
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
               && (STORE_FLAG_VALUE == 1
                   || if_info->branch_cost >= 2))
        normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
               && (STORE_FLAG_VALUE == 1 || if_info->branch_cost >= 2))
        {
          normalize = 1;
          reversep = true;
        }
      else if (itrue == -1
               && (STORE_FLAG_VALUE == -1
                   || if_info->branch_cost >= 2))
        normalize = -1;
      else if (ifalse == -1 && can_reverse
               && (STORE_FLAG_VALUE == -1 || if_info->branch_cost >= 2))
        {
          normalize = -1;
          reversep = true;
        }
      else
        return FALSE;

      if (reversep)
        {
          std::swap (itrue, ifalse);
          diff = trunc_int_for_mode (-(unsigned HOST_WIDE_INT) diff, mode);
        }

      start_sequence ();

      /* If we have x := test ? x + 3 : x + 4 then move the original
         x out of the way while we store flags.  */
      if (common && rtx_equal_p (common, if_info->x))
        {
          common = gen_reg_rtx (mode);
          noce_emit_move_insn (common, if_info->x);
        }

      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
        {
          end_sequence ();
          return FALSE;
        }

      /* if (test) x = 3; else x = 4;
         =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
        {
          /* Add the common part now.  This may allow combine to merge this
             with the store flag operation earlier into some sort of conditional
             increment/decrement if the target allows it.  */
          if (common)
            target = expand_simple_binop (mode, PLUS,
                                          target, common,
                                          target, 0, OPTAB_WIDEN);

          /* Always use ifalse here.  It should have been swapped with itrue
             when appropriate when reversep is true.  */
          target = expand_simple_binop (mode, subtract_flag_p ? MINUS : PLUS,
                                        gen_int_mode (ifalse, mode), target,
                                        if_info->x, 0, OPTAB_WIDEN);
        }
      /* Other cases are not beneficial when the original A and B are PLUS
         expressions.  */
      else if (common)
        {
          end_sequence ();
          return FALSE;
        }
      /* if (test) x = 8; else x = 0;
         =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
        {
          target = expand_simple_binop (mode, ASHIFT,
                                        target, GEN_INT (tmp), if_info->x, 0,
                                        OPTAB_WIDEN);
        }
      /* if (test) x = -1; else x = b;
         =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
        {
          target = expand_simple_binop (mode, IOR,
                                        target, gen_int_mode (ifalse, mode),
                                        if_info->x, 0, OPTAB_WIDEN);
        }
      else
        {
          end_sequence ();
          return FALSE;
        }

      if (! target)
        {
          end_sequence ();
          return FALSE;
        }

      if (target != if_info->x)
        noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (!seq)
        return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
                               INSN_LOCATION (if_info->insn_a));
      return TRUE;
    }

  return FALSE;
}
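
/* Sanity-checking the identities above with STORE_FLAG_VALUE == 1:
   "if (test) x = 3; else x = 4" rewritten as x = 3 + (test == 0) gives
   x = 3 + 0 = 3 when the test holds and x = 3 + 1 = 4 when it fails.
   Likewise "if (test) x = 8; else x = 0" as x = (test != 0) << 3 gives
   8 or 0, since exact_log2 (8) == 3.  */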

/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".  */

static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int subtract, normalize;

  if (GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
          != UNKNOWN))
    {
      rtx cond = if_info->cond;
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
          && general_operand (XEXP (cond, 1), VOIDmode))
        {
          start_sequence ();
          target = emit_conditional_add (if_info->x, code,
                                         XEXP (cond, 0),
                                         XEXP (cond, 1),
                                         VOIDmode,
                                         if_info->b,
                                         XEXP (if_info->a, 1),
                                         GET_MODE (if_info->x),
                                         (code == LTU || code == GEU
                                          || code == LEU || code == GTU));
          if (target)
            {
              if (target != if_info->x)
                noce_emit_move_insn (if_info->x, target);

              seq = end_ifcvt_sequence (if_info);
              if (!seq)
                return FALSE;

              emit_insn_before_setloc (seq, if_info->jump,
                                       INSN_LOCATION (if_info->insn_a));
              return TRUE;
            }
          end_sequence ();
        }

      /* If that fails, construct conditional increment or decrement using
         setcc.  */
      if (if_info->branch_cost >= 2
          && (XEXP (if_info->a, 1) == const1_rtx
              || XEXP (if_info->a, 1) == constm1_rtx))
        {
          start_sequence ();
          if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
            subtract = 0, normalize = 0;
          else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
            subtract = 1, normalize = 0;
          else
            subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));

          target = noce_emit_store_flag (if_info,
                                         gen_reg_rtx (GET_MODE (if_info->x)),
                                         1, normalize);

          if (target)
            target = expand_simple_binop (GET_MODE (if_info->x),
                                          subtract ? MINUS : PLUS,
                                          if_info->b, target, if_info->x,
                                          0, OPTAB_WIDEN);
          if (target)
            {
              if (target != if_info->x)
                noce_emit_move_insn (if_info->x, target);

              seq = end_ifcvt_sequence (if_info);
              if (!seq)
                return FALSE;

              emit_insn_before_setloc (seq, if_info->jump,
                                       INSN_LOCATION (if_info->insn_a));
              return TRUE;
            }
          end_sequence ();
        }
    }

  return FALSE;
}
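
/* In other words, when the addcc pattern is unavailable the setcc
   fallback materializes the test as a flag in a scratch register and
   then adds or subtracts it, roughly:

     tmp = (test != 0);
     foo = foo + tmp;    (or foo - tmp for "foo--")

   so foo changes by exactly one when the test holds and is unchanged
   otherwise.  */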

/* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */

static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int reversep;

  reversep = 0;
  if ((if_info->branch_cost >= 2
       || STORE_FLAG_VALUE == -1)
      && ((if_info->a == const0_rtx
           && rtx_equal_p (if_info->b, if_info->x))
          || ((reversep = (reversed_comparison_code (if_info->cond,
                                                     if_info->jump)
                           != UNKNOWN))
              && if_info->b == const0_rtx
              && rtx_equal_p (if_info->a, if_info->x))))
    {
      start_sequence ();
      target = noce_emit_store_flag (if_info,
                                     gen_reg_rtx (GET_MODE (if_info->x)),
                                     reversep, -1);
      if (target)
        target = expand_simple_binop (GET_MODE (if_info->x), AND,
                                      if_info->x,
                                      target, if_info->x, 0,
                                      OPTAB_WIDEN);

      if (target)
        {
          int old_cost, new_cost, insn_cost;
          int speed_p;

          if (target != if_info->x)
            noce_emit_move_insn (if_info->x, target);

          seq = end_ifcvt_sequence (if_info);
          if (!seq)
            return FALSE;

          speed_p = optimize_bb_for_speed_p (BLOCK_FOR_INSN (if_info->insn_a));
          insn_cost = insn_rtx_cost (PATTERN (if_info->insn_a), speed_p);
          old_cost = COSTS_N_INSNS (if_info->branch_cost) + insn_cost;
          new_cost = seq_cost (seq, speed_p);

          if (new_cost > old_cost)
            return FALSE;

          emit_insn_before_setloc (seq, if_info->jump,
                                   INSN_LOCATION (if_info->insn_a));
          return TRUE;
        }

      end_sequence ();
    }

  return FALSE;
}
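
/* The mask trick works because the store flag is normalized to -1:
   the scratch register becomes all-ones when x should keep its value
   and all-zeros when it should become 0, so a single AND implements
   the conditional clear.  Checking "x &= -(test == 0)": a true test
   gives x & 0 == 0, a false test gives x & -1 == x.  */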

/* Helper function for noce_try_cmove and noce_try_cmove_arith.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
                 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  rtx target ATTRIBUTE_UNUSED;
  int unsignedp ATTRIBUTE_UNUSED;

  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx cond = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      rtx if_then_else = gen_rtx_IF_THEN_ELSE (GET_MODE (x),
                                               cond, vtrue, vfalse);
      rtx set = gen_rtx_SET (x, if_then_else);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      if (recog_memoized (insn) >= 0)
        {
          rtx_insn *seq = get_insns ();
          end_sequence ();
          emit_insn (seq);

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird,
     except that the target supports cbranchcc4.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    {
      if (!(HAVE_cbranchcc4)
          || GET_MODE_CLASS (GET_MODE (cmp_a)) != MODE_CC
          || cmp_b != const0_rtx)
        return NULL_RTX;
    }

  unsignedp = (code == LTU || code == GEU
               || code == LEU || code == GTU);

  target = emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
                                  vtrue, vfalse, GET_MODE (x),
                                  unsignedp);
  if (target)
    return target;

  /* We might be faced with a situation like:

       x      = (reg:M TARGET)
       vtrue  = (subreg:M (reg:N VTRUE) BYTE)
       vfalse = (subreg:M (reg:N VFALSE) BYTE)

     We can't do a conditional move in mode M, but it's possible that we
     could do a conditional move in mode N instead and take a subreg of
     the result.

     If we can't create new pseudos, though, don't bother.  */
  if (reload_completed)
    return NULL_RTX;

  if (GET_CODE (vtrue) == SUBREG && GET_CODE (vfalse) == SUBREG)
    {
      rtx reg_vtrue = SUBREG_REG (vtrue);
      rtx reg_vfalse = SUBREG_REG (vfalse);
      unsigned int byte_vtrue = SUBREG_BYTE (vtrue);
      unsigned int byte_vfalse = SUBREG_BYTE (vfalse);
      rtx promoted_target;

      if (GET_MODE (reg_vtrue) != GET_MODE (reg_vfalse)
          || byte_vtrue != byte_vfalse
          || (SUBREG_PROMOTED_VAR_P (vtrue)
              != SUBREG_PROMOTED_VAR_P (vfalse))
          || (SUBREG_PROMOTED_GET (vtrue)
              != SUBREG_PROMOTED_GET (vfalse)))
        return NULL_RTX;

      promoted_target = gen_reg_rtx (GET_MODE (reg_vtrue));

      target = emit_conditional_move (promoted_target, code, cmp_a, cmp_b,
                                      VOIDmode, reg_vtrue, reg_vfalse,
                                      GET_MODE (reg_vtrue), unsignedp);
      /* Nope, couldn't do it in that mode either.  */
      if (!target)
        return NULL_RTX;

      target = gen_rtx_SUBREG (GET_MODE (vtrue), promoted_target, byte_vtrue);
      SUBREG_PROMOTED_VAR_P (target) = SUBREG_PROMOTED_VAR_P (vtrue);
      SUBREG_PROMOTED_SET (target, SUBREG_PROMOTED_GET (vtrue));
      emit_move_insn (x, target);
      return x;
    }
  else
    return NULL_RTX;
}
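
/* The direct form built above is simply

     (set (reg x) (if_then_else (code cmp_a cmp_b) vtrue vfalse))

   which is the canonical RTL for a conditional move; if the target's
   mov<mode>cc pattern recognizes it, no further expansion is needed.  */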

/* Try only simple constants and registers here.  More complex cases
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
   has had a go at it.  */

static int
noce_try_cmove (struct noce_if_info *if_info)
{
  enum rtx_code code;
  rtx target;
  rtx_insn *seq;

  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
    {
      start_sequence ();

      code = GET_CODE (if_info->cond);
      target = noce_emit_cmove (if_info, if_info->x, code,
                                XEXP (if_info->cond, 0),
                                XEXP (if_info->cond, 1),
                                if_info->a, if_info->b);

      if (target)
        {
          if (target != if_info->x)
            noce_emit_move_insn (if_info->x, target);

          seq = end_ifcvt_sequence (if_info);
          if (!seq)
            return FALSE;

          emit_insn_before_setloc (seq, if_info->jump,
                                   INSN_LOCATION (if_info->insn_a));
          return TRUE;
        }
      /* If both a and b are constants try a last-ditch transformation:
         if (test) x = a; else x = b;
         =>   x = (-(test != 0) & (b - a)) + a;
         Try this only if the target-specific expansion above has failed.
         The target-specific expander may want to generate sequences that
         we don't know about, so give them a chance before trying this
         approach.  */
      else if (!targetm.have_conditional_execution ()
               && CONST_INT_P (if_info->a) && CONST_INT_P (if_info->b)
               && ((if_info->branch_cost >= 2 && STORE_FLAG_VALUE == -1)
                   || if_info->branch_cost >= 3))
        {
          machine_mode mode = GET_MODE (if_info->x);
          HOST_WIDE_INT ifalse = INTVAL (if_info->a);
          HOST_WIDE_INT itrue = INTVAL (if_info->b);
          rtx target = noce_emit_store_flag (if_info, if_info->x, false, -1);
          if (!target)
            {
              end_sequence ();
              return FALSE;
            }

          HOST_WIDE_INT diff = (unsigned HOST_WIDE_INT) itrue - ifalse;
          /* Make sure we can represent the difference
             between the two values.  */
          if ((diff > 0)
              != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
            {
              end_sequence ();
              return FALSE;
            }

          diff = trunc_int_for_mode (diff, mode);
          target = expand_simple_binop (mode, AND,
                                        target, gen_int_mode (diff, mode),
                                        if_info->x, 0, OPTAB_WIDEN);
          if (target)
            target = expand_simple_binop (mode, PLUS,
                                          target, gen_int_mode (ifalse, mode),
                                          if_info->x, 0, OPTAB_WIDEN);
          if (target)
            {
              if (target != if_info->x)
                noce_emit_move_insn (if_info->x, target);

              seq = end_ifcvt_sequence (if_info);
              if (!seq)
                return FALSE;

              emit_insn_before_setloc (seq, if_info->jump,
                                       INSN_LOCATION (if_info->insn_a));
              return TRUE;
            }
          else
            {
              end_sequence ();
              return FALSE;
            }
        }
      else
        end_sequence ();
    }

  return FALSE;
}
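
/* Verifying the last-ditch identity x = (-(test != 0) & (b - a)) + a:
   with the store flag normalized to -1, a true test gives an all-ones
   mask, the AND keeps (b - a), and x = (b - a) + a = b; a false test
   gives a zero mask and x = 0 + a = a.  */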

/* Try more complex cases involving conditional_move.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx_insn *insn_a, *insn_b;
  rtx target;
  int is_mem = 0;
  int insn_cost;
  enum rtx_code code;
  rtx_insn *ifcvt_seq;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  /* ??? FIXME: Magic number 5.  */
  if (cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b)
      && if_info->branch_cost >= 5)
    {
      machine_mode address_mode = get_address_mode (a);

      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (address_mode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not trap or fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_or_fault_p (a) || may_trap_or_fault_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
        if (test)
          x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Total insn_rtx_cost should be smaller than branch cost.  Exit
     if insn_rtx_cost can't be estimated.  */
  if (insn_a)
    {
      insn_cost
        = insn_rtx_cost (PATTERN (insn_a),
                         optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_a)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }
  else
    insn_cost = 0;

  if (insn_b)
    {
      insn_cost
        += insn_rtx_cost (PATTERN (insn_b),
                          optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_b)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
        reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
        reversep = 1;

      if (reversep)
        {
          code = reversed_comparison_code (if_info->cond, if_info->jump);
          std::swap (a, b);
          std::swap (insn_a, insn_b);
        }
    }

  start_sequence ();

  orig_a = a;
  orig_b = b;

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx_insn *insn;

      if (is_mem)
        {
          rtx reg = gen_reg_rtx (GET_MODE (a));
          insn = emit_insn (gen_rtx_SET (reg, a));
        }
      else if (! insn_a)
        goto end_seq_and_fail;
      else
        {
          a = gen_reg_rtx (GET_MODE (a));
          rtx_insn *copy_of_a = as_a <rtx_insn *> (copy_rtx (insn_a));
          rtx set = single_set (copy_of_a);
          SET_DEST (set) = a;
          insn = emit_insn (PATTERN (copy_of_a));
        }
      if (recog_memoized (insn) < 0)
        goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx pat;
      rtx_insn *last;
      rtx_insn *new_insn;

      if (is_mem)
        {
          rtx reg = gen_reg_rtx (GET_MODE (b));
          pat = gen_rtx_SET (reg, b);
        }
      else if (! insn_b)
        goto end_seq_and_fail;
      else
        {
          b = gen_reg_rtx (GET_MODE (b));
          rtx_insn *copy_of_insn_b = as_a <rtx_insn *> (copy_rtx (insn_b));
          rtx set = single_set (copy_of_insn_b);
          SET_DEST (set) = b;
          pat = PATTERN (copy_of_insn_b);
        }

      /* If insn to set up A clobbers any registers B depends on, try to
         swap insn that sets up A with the one that sets up B.  If even
         that doesn't help, punt.  */
      last = get_last_insn ();
      if (last && modified_in_p (orig_b, last))
        {
          new_insn = emit_insn_before (pat, get_insns ());
          if (modified_in_p (orig_a, new_insn))
            goto end_seq_and_fail;
        }
      else
        new_insn = emit_insn (pat);

      if (recog_memoized (new_insn) < 0)
        goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
                            XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory source, as above, emit the load now.  */
  if (is_mem)
    {
      rtx mem = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
        MEM_VOLATILE_P (mem) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
        set_mem_alias_set (mem, MEM_ALIAS_SET (if_info->a));
      set_mem_align (mem,
                     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
      set_mem_addr_space (mem, MEM_ADDR_SPACE (if_info->a));

      noce_emit_move_insn (if_info->x, mem);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  ifcvt_seq = end_ifcvt_sequence (if_info);
  if (!ifcvt_seq)
    return FALSE;

  emit_insn_before_setloc (ifcvt_seq, if_info->jump,
                           INSN_LOCATION (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}
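
/* In the IS_MEM case above, "if (test) x = *p; else x = *q" becomes a
   conditional move between the two addresses followed by a single
   load:

     addr = test ? p : q;
     x = *addr;

   which is why both sides must be in the same address space and why
   the conservative MEM flags of both sources are merged onto the new
   load.  */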

/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
                        rtx_insn **earliest)
{
  rtx cond, set;
  rtx_insn *insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump);
  if (if_info->then_else_reversed)
    reverse = !reverse;

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
        replacing "if x < 4" with "if x <= 3".  We can undo that (or
        make equivalent types of changes) to get the constants we need
        if they're off by one in the right direction.  */

  if (CONST_INT_P (target))
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx_insn *prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
      if (prev_insn
          && BLOCK_FOR_INSN (prev_insn)
             == BLOCK_FOR_INSN (if_info->cond_earliest)
          && INSN_P (prev_insn)
          && GET_CODE (PATTERN (prev_insn)) == SET)
        {
          rtx src = find_reg_equal_equiv_note (prev_insn);
          if (!src)
            src = SET_SRC (PATTERN (prev_insn));
          if (CONST_INT_P (src))
            {
              if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
                op_a = src;
              else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
                op_b = src;

              if (CONST_INT_P (op_a))
                {
                  std::swap (op_a, op_b);
                  code = swap_condition (code);
                }
            }
        }

      /* Now, look to see if we can get the right constant by
         adjusting the conditional.  */
      if (CONST_INT_P (op_b))
        {
          HOST_WIDE_INT desired_val = INTVAL (target);
          HOST_WIDE_INT actual_val = INTVAL (op_b);

          switch (code)
            {
            case LT:
              if (actual_val == desired_val + 1)
                {
                  code = LE;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case LE:
              if (actual_val == desired_val - 1)
                {
                  code = LT;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case GT:
              if (actual_val == desired_val - 1)
                {
                  code = GE;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case GE:
              if (actual_val == desired_val + 1)
                {
                  code = GT;
                  op_b = GEN_INT (desired_val);
                }
              break;
            default:
              break;
            }
        }

      /* If we made any changes, generate a new conditional that is
         equivalent to what we started with, but has the right
         constants in it.  */
      if (code != GET_CODE (if_info->cond)
          || op_a != XEXP (if_info->cond, 0)
          || op_b != XEXP (if_info->cond, 1))
        {
          cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
          *earliest = if_info->cond_earliest;
          return cond;
        }
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
                                 earliest, target, HAVE_cbranchcc4, true);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (modified_in_p (if_info->a, insn)
            || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
2077 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
2079 static int
2080 noce_try_minmax (struct noce_if_info *if_info)
2082 rtx cond, target;
2083 rtx_insn *earliest, *seq;
2084 enum rtx_code code, op;
2085 int unsignedp;
2087 /* ??? Reject modes with NaNs or signed zeros since we don't know how
2088 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
2089 to get the target to tell us... */
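/* For example (illustrative): with IEEE NaNs, "if (a < b) x = a; else
   x = b" assigns B when B is a NaN, because the comparison is false,
   whereas an SMIN/SMAX pattern is free to return either operand.  */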
2090 if (HONOR_SIGNED_ZEROS (if_info->x)
2091 || HONOR_NANS (if_info->x))
2092 return FALSE;
2094 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
2095 if (!cond)
2096 return FALSE;
2098 /* Verify the condition is of the form we expect, and canonicalize
2099 the comparison code. */
2100 code = GET_CODE (cond);
2101 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
2103 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
2104 return FALSE;
2106 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
2108 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
2109 return FALSE;
2110 code = swap_condition (code);
2112 else
2113 return FALSE;
2115 /* Determine what sort of operation this is. Note that the code is for
2116 a taken branch, so the code->operation mapping appears backwards. */
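/* For example (illustrative): a taken-branch condition of A < B means
   the branch -- and with it the ELSE value B -- is selected exactly
   when B is the larger operand, so LT maps to SMAX rather than SMIN.  */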
2117 switch (code)
2119 case LT:
2120 case LE:
2121 case UNLT:
2122 case UNLE:
2123 op = SMAX;
2124 unsignedp = 0;
2125 break;
2126 case GT:
2127 case GE:
2128 case UNGT:
2129 case UNGE:
2130 op = SMIN;
2131 unsignedp = 0;
2132 break;
2133 case LTU:
2134 case LEU:
2135 op = UMAX;
2136 unsignedp = 1;
2137 break;
2138 case GTU:
2139 case GEU:
2140 op = UMIN;
2141 unsignedp = 1;
2142 break;
2143 default:
2144 return FALSE;
2147 start_sequence ();
2149 target = expand_simple_binop (GET_MODE (if_info->x), op,
2150 if_info->a, if_info->b,
2151 if_info->x, unsignedp, OPTAB_WIDEN);
2152 if (! target)
2154 end_sequence ();
2155 return FALSE;
2157 if (target != if_info->x)
2158 noce_emit_move_insn (if_info->x, target);
2160 seq = end_ifcvt_sequence (if_info);
2161 if (!seq)
2162 return FALSE;
2164 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2165 if_info->cond = cond;
2166 if_info->cond_earliest = earliest;
2168 return TRUE;
2171 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
2172 "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
2173 etc. */
2175 static int
2176 noce_try_abs (struct noce_if_info *if_info)
2178 rtx cond, target, a, b, c;
2179 rtx_insn *earliest, *seq;
2180 int negate;
2181 bool one_cmpl = false;
2183 /* Reject modes with signed zeros. */
2184 if (HONOR_SIGNED_ZEROS (if_info->x))
2185 return FALSE;
2187 /* Recognize A and B as constituting an ABS or NABS. The canonical
2188 form is a branch around the negation, taken when the object is the
2189 first operand of a comparison against 0 that evaluates to true. */
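/* For example (illustrative): "if (y < 0) x = -y; else x = y;" arrives
   here with A = (neg y) and B = y, matching the first arm below with
   NEGATE = 0.  */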
2190 a = if_info->a;
2191 b = if_info->b;
2192 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
2193 negate = 0;
2194 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
2196 std::swap (a, b);
2197 negate = 1;
2199 else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
2201 negate = 0;
2202 one_cmpl = true;
2204 else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
2206 std::swap (a, b);
2207 negate = 1;
2208 one_cmpl = true;
2210 else
2211 return FALSE;
2213 cond = noce_get_alt_condition (if_info, b, &earliest);
2214 if (!cond)
2215 return FALSE;
2217 /* Verify the condition is of the form we expect. */
2218 if (rtx_equal_p (XEXP (cond, 0), b))
2219 c = XEXP (cond, 1);
2220 else if (rtx_equal_p (XEXP (cond, 1), b))
2222 c = XEXP (cond, 0);
2223 negate = !negate;
2225 else
2226 return FALSE;
2228 /* Verify that C is zero. Search one step backward for a
2229 REG_EQUAL note or a simple source if necessary. */
2230 if (REG_P (c))
2232 rtx set;
2233 rtx_insn *insn = prev_nonnote_insn (earliest);
2234 if (insn
2235 && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
2236 && (set = single_set (insn))
2237 && rtx_equal_p (SET_DEST (set), c))
2239 rtx note = find_reg_equal_equiv_note (insn);
2240 if (note)
2241 c = XEXP (note, 0);
2242 else
2243 c = SET_SRC (set);
2245 else
2246 return FALSE;
2248 if (MEM_P (c)
2249 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
2250 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
2251 c = get_pool_constant (XEXP (c, 0));
2253 /* Work around funny ideas get_condition has wrt canonicalization.
2254 Note that these rtx constants are known to be CONST_INT, and
2255 therefore imply integer comparisons. */
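/* For example (illustrative): for integers "b > -1" is equivalent to
   "b >= 0" and "b < 1" to "b <= 0", so the two forms above are
   accepted as comparisons against zero.  */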
2256 if (c == constm1_rtx && GET_CODE (cond) == GT)
2258 else if (c == const1_rtx && GET_CODE (cond) == LT)
2260 else if (c != CONST0_RTX (GET_MODE (b)))
2261 return FALSE;
2263 /* Determine what sort of operation this is. */
2264 switch (GET_CODE (cond))
2266 case LT:
2267 case LE:
2268 case UNLT:
2269 case UNLE:
2270 negate = !negate;
2271 break;
2272 case GT:
2273 case GE:
2274 case UNGT:
2275 case UNGE:
2276 break;
2277 default:
2278 return FALSE;
2281 start_sequence ();
2282 if (one_cmpl)
2283 target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
2284 if_info->x);
2285 else
2286 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
2288 /* ??? It's a quandary whether cmove would be better here, especially
2289 for integers. Perhaps combine will clean things up. */
2290 if (target && negate)
2292 if (one_cmpl)
2293 target = expand_simple_unop (GET_MODE (target), NOT, target,
2294 if_info->x, 0);
2295 else
2296 target = expand_simple_unop (GET_MODE (target), NEG, target,
2297 if_info->x, 0);
2300 if (! target)
2302 end_sequence ();
2303 return FALSE;
2306 if (target != if_info->x)
2307 noce_emit_move_insn (if_info->x, target);
2309 seq = end_ifcvt_sequence (if_info);
2310 if (!seq)
2311 return FALSE;
2313 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2314 if_info->cond = cond;
2315 if_info->cond_earliest = earliest;
2317 return TRUE;
2320 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;". */
2322 static int
2323 noce_try_sign_mask (struct noce_if_info *if_info)
2325 rtx cond, t, m, c;
2326 rtx_insn *seq;
2327 machine_mode mode;
2328 enum rtx_code code;
2329 bool t_unconditional;
2331 cond = if_info->cond;
2332 code = GET_CODE (cond);
2333 m = XEXP (cond, 0);
2334 c = XEXP (cond, 1);
2336 t = NULL_RTX;
2337 if (if_info->a == const0_rtx)
2339 if ((code == LT && c == const0_rtx)
2340 || (code == LE && c == constm1_rtx))
2341 t = if_info->b;
2343 else if (if_info->b == const0_rtx)
2345 if ((code == GE && c == const0_rtx)
2346 || (code == GT && c == constm1_rtx))
2347 t = if_info->a;
2350 if (! t || side_effects_p (t))
2351 return FALSE;
2353 /* We currently don't handle different modes. */
2354 mode = GET_MODE (t);
2355 if (GET_MODE (m) != mode)
2356 return FALSE;
2358 /* This is only profitable if T is unconditionally executed/evaluated in the
2359 original insn sequence or T is cheap. The former happens if B is the
2360 non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
2361 INSN_B, which can happen e.g. for conditional stores to memory. For the
2362 cost computation use the block TEST_BB where the evaluation will end up
2363 after the transformation. */
2364 t_unconditional =
2365 (t == if_info->b
2366 && (if_info->insn_b == NULL_RTX
2367 || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
2368 if (!(t_unconditional
2369 || (set_src_cost (t, mode, optimize_bb_for_speed_p (if_info->test_bb))
2370 < COSTS_N_INSNS (2))))
2371 return FALSE;
2373 start_sequence ();
2374 /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
2375 "(signed) m >> 31" directly. This benefits targets with specialized
2376 insns to obtain the signmask, but still uses ashr_optab otherwise. */
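/* Illustrative example: for a 32-bit M, the store-flag result is -1
   (all bits set) when M < 0 and 0 otherwise, so the AND below yields T
   on the negative path and 0 on the other, as required.  */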
2377 m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
2378 t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
2379 : NULL_RTX;
2381 if (!t)
2383 end_sequence ();
2384 return FALSE;
2387 noce_emit_move_insn (if_info->x, t);
2389 seq = end_ifcvt_sequence (if_info);
2390 if (!seq)
2391 return FALSE;
2393 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2394 return TRUE;
2398 /* Optimize away "if (x & C) x |= C" and similar bit manipulation
2399 transformations. */
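/* Illustrative summary of the cases handled below, with C a
   single-bit mask:
     if ((x & C) == 0) x |= C;   becomes  x |= C;
     if ((x & C) != 0) x |= C;   becomes  nothing;
     if ((x & C) == 0) x ^= C;   becomes  x |= C;
     if ((x & C) != 0) x ^= C;   becomes  x &= ~C;
     if ((x & C) == 0) x &= ~C;  becomes  nothing;
     if ((x & C) != 0) x &= ~C;  becomes  x &= ~C;  */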
2401 static int
2402 noce_try_bitop (struct noce_if_info *if_info)
2404 rtx cond, x, a, result;
2405 rtx_insn *seq;
2406 machine_mode mode;
2407 enum rtx_code code;
2408 int bitnum;
2410 x = if_info->x;
2411 cond = if_info->cond;
2412 code = GET_CODE (cond);
2414 /* Check that there is no ELSE arm: B must be X itself. */
2415 if (! rtx_equal_p (x, if_info->b))
2416 return FALSE;
2418 /* Check for a suitable condition. */
2419 if (code != NE && code != EQ)
2420 return FALSE;
2421 if (XEXP (cond, 1) != const0_rtx)
2422 return FALSE;
2423 cond = XEXP (cond, 0);
2425 /* ??? We could also handle AND here. */
2426 if (GET_CODE (cond) == ZERO_EXTRACT)
2428 if (XEXP (cond, 1) != const1_rtx
2429 || !CONST_INT_P (XEXP (cond, 2))
2430 || ! rtx_equal_p (x, XEXP (cond, 0)))
2431 return FALSE;
2432 bitnum = INTVAL (XEXP (cond, 2));
2433 mode = GET_MODE (x);
2434 if (BITS_BIG_ENDIAN)
2435 bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
2436 if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
2437 return FALSE;
2439 else
2440 return FALSE;
2442 a = if_info->a;
2443 if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
2445 /* Check for "if (X & C) x = x op C". */
2446 if (! rtx_equal_p (x, XEXP (a, 0))
2447 || !CONST_INT_P (XEXP (a, 1))
2448 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2449 != (unsigned HOST_WIDE_INT) 1 << bitnum)
2450 return FALSE;
2452 /* if ((x & C) == 0) x |= C; is transformed to x |= C. */
2453 /* if ((x & C) != 0) x |= C; is transformed to nothing. */
2454 if (GET_CODE (a) == IOR)
2455 result = (code == NE) ? a : NULL_RTX;
2456 else if (code == NE)
2458 /* if ((x & C) == 0) x ^= C; is transformed to x |= C. */
2459 result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
2460 result = simplify_gen_binary (IOR, mode, x, result);
2462 else
2464 /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C. */
2465 result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
2466 result = simplify_gen_binary (AND, mode, x, result);
2469 else if (GET_CODE (a) == AND)
2471 /* Check for "if (X & C) x &= ~C". */
2472 if (! rtx_equal_p (x, XEXP (a, 0))
2473 || !CONST_INT_P (XEXP (a, 1))
2474 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2475 != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
2476 return FALSE;
2478 /* if ((x & C) == 0) x &= ~C; is transformed to nothing. */
2479 /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C. */
2480 result = (code == EQ) ? a : NULL_RTX;
2482 else
2483 return FALSE;
2485 if (result)
2487 start_sequence ();
2488 noce_emit_move_insn (x, result);
2489 seq = end_ifcvt_sequence (if_info);
2490 if (!seq)
2491 return FALSE;
2493 emit_insn_before_setloc (seq, if_info->jump,
2494 INSN_LOCATION (if_info->insn_a));
2496 return TRUE;
2500 /* Similar to get_condition, only the resulting condition must be
2501 valid at JUMP, instead of at EARLIEST.
2503 If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
2504 THEN block of the caller, and we have to reverse the condition. */
2506 static rtx
2507 noce_get_condition (rtx_insn *jump, rtx_insn **earliest, bool then_else_reversed)
2509 rtx cond, set, tmp;
2510 bool reverse;
2512 if (! any_condjump_p (jump))
2513 return NULL_RTX;
2515 set = pc_set (jump);
2517 /* If this branches to JUMP_LABEL when the condition is false,
2518 reverse the condition. */
2519 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
2520 && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump));
2522 /* We may have to reverse because the caller's if block is not canonical,
2523 i.e. the THEN block isn't the fallthrough block for the TEST block
2524 (see find_if_header). */
2525 if (then_else_reversed)
2526 reverse = !reverse;
2528 /* If the condition variable is a register and is MODE_INT, accept it. */
2530 cond = XEXP (SET_SRC (set), 0);
2531 tmp = XEXP (cond, 0);
2532 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT
2533 && (GET_MODE (tmp) != BImode
2534 || !targetm.small_register_classes_for_mode_p (BImode)))
2536 *earliest = jump;
2538 if (reverse)
2539 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2540 GET_MODE (cond), tmp, XEXP (cond, 1));
2541 return cond;
2544 /* Otherwise, fall back on canonicalize_condition to do the dirty
2545 work of manipulating MODE_CC values and COMPARE rtx codes. */
2546 tmp = canonicalize_condition (jump, cond, reverse, earliest,
2547 NULL_RTX, HAVE_cbranchcc4, true);
2549 /* We don't handle side effects in the condition; doing so would mean
2550 handling REG_INC notes and making sure no duplicate conditions are emitted. */
2551 if (tmp != NULL_RTX && side_effects_p (tmp))
2552 return NULL_RTX;
2554 return tmp;
2557 /* Return true if OP is ok for if-then-else processing. */
2559 static int
2560 noce_operand_ok (const_rtx op)
2562 if (side_effects_p (op))
2563 return FALSE;
2565 /* We special-case memories, so handle any of them with
2566 no address side effects. */
2567 if (MEM_P (op))
2568 return ! side_effects_p (XEXP (op, 0));
2570 return ! may_trap_p (op);
2573 /* Return true if a write into MEM may trap or fault. */
2575 static bool
2576 noce_mem_write_may_trap_or_fault_p (const_rtx mem)
2578 rtx addr;
2580 if (MEM_READONLY_P (mem))
2581 return true;
2583 if (may_trap_or_fault_p (mem))
2584 return true;
2586 addr = XEXP (mem, 0);
2588 /* Call target hook to avoid the effects of -fpic etc.... */
2589 addr = targetm.delegitimize_address (addr);
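/* Walk down to the base term of the address.  For example
   (illustrative), (plus (symbol_ref "x") (const_int 8)) is stripped to
   its SYMBOL_REF, whose section then decides the answer below.  */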
2591 while (addr)
2592 switch (GET_CODE (addr))
2594 case CONST:
2595 case PRE_DEC:
2596 case PRE_INC:
2597 case POST_DEC:
2598 case POST_INC:
2599 case POST_MODIFY:
2600 addr = XEXP (addr, 0);
2601 break;
2602 case LO_SUM:
2603 case PRE_MODIFY:
2604 addr = XEXP (addr, 1);
2605 break;
2606 case PLUS:
2607 if (CONST_INT_P (XEXP (addr, 1)))
2608 addr = XEXP (addr, 0);
2609 else
2610 return false;
2611 break;
2612 case LABEL_REF:
2613 return true;
2614 case SYMBOL_REF:
2615 if (SYMBOL_REF_DECL (addr)
2616 && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
2617 return true;
2618 return false;
2619 default:
2620 return false;
2623 return false;
2626 /* Return whether we can use store speculation for MEM. TOP_BB is the
2627 basic block above the conditional block where we are considering
2628 doing the speculative store. We look for whether MEM is set
2629 unconditionally later in the function. */
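/* Illustrative example: given
     if (c) *p = x;
     ...
     *p = y;
   where the second store post-dominates the test, the conditional
   store may be done speculatively, since *p is written on every path
   anyway.  */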
2631 static bool
2632 noce_can_store_speculate_p (basic_block top_bb, const_rtx mem)
2634 basic_block dominator;
2636 for (dominator = get_immediate_dominator (CDI_POST_DOMINATORS, top_bb);
2637 dominator != NULL;
2638 dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator))
2640 rtx_insn *insn;
2642 FOR_BB_INSNS (dominator, insn)
2644 /* If we see something that might be a memory barrier, we
2645 have to stop looking. Even if the MEM is set later in
2646 the function, we still don't want to set it
2647 unconditionally before the barrier. */
2648 if (INSN_P (insn)
2649 && (volatile_insn_p (PATTERN (insn))
2650 || (CALL_P (insn) && (!RTL_CONST_CALL_P (insn)))))
2651 return false;
2653 if (memory_must_be_modified_in_insn_p (mem, insn))
2654 return true;
2655 if (modified_in_p (XEXP (mem, 0), insn))
2656 return false;
2661 return false;
2664 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2665 it without using conditional execution. Return TRUE if we were successful
2666 at converting the block. */
2668 static int
2669 noce_process_if_block (struct noce_if_info *if_info)
2671 basic_block test_bb = if_info->test_bb; /* test block */
2672 basic_block then_bb = if_info->then_bb; /* THEN */
2673 basic_block else_bb = if_info->else_bb; /* ELSE or NULL */
2674 basic_block join_bb = if_info->join_bb; /* JOIN */
2675 rtx_insn *jump = if_info->jump;
2676 rtx cond = if_info->cond;
2677 rtx_insn *insn_a, *insn_b;
2678 rtx set_a, set_b;
2679 rtx orig_x, x, a, b;
2680 rtx cc;
2682 /* We're looking for patterns of the form
2684 (1) if (...) x = a; else x = b;
2685 (2) x = b; if (...) x = a;
2686 (3) if (...) x = a; // as if with an initial x = x.
2688 The latter patterns require jumps to be more expensive.
2690 ??? For future expansion, look for multiple X in such patterns. */
2692 /* Look for one of the potential sets. */
2693 insn_a = first_active_insn (then_bb);
2694 if (! insn_a
2695 || insn_a != last_active_insn (then_bb, FALSE)
2696 || (set_a = single_set (insn_a)) == NULL_RTX)
2697 return FALSE;
2699 x = SET_DEST (set_a);
2700 a = SET_SRC (set_a);
2702 /* Look for the other potential set. Make sure we've got equivalent
2703 destinations. */
2704 /* ??? This is overconservative. Storing to two different mems is
2705 as easy as conditionally computing the address. Storing to a
2706 single mem merely requires a scratch memory to use as one of the
2707 destination addresses; often the memory immediately below the
2708 stack pointer is available for this. */
2709 set_b = NULL_RTX;
2710 if (else_bb)
2712 insn_b = first_active_insn (else_bb);
2713 if (! insn_b
2714 || insn_b != last_active_insn (else_bb, FALSE)
2715 || (set_b = single_set (insn_b)) == NULL_RTX
2716 || ! rtx_interchangeable_p (x, SET_DEST (set_b)))
2717 return FALSE;
2719 else
2721 insn_b = prev_nonnote_nondebug_insn (if_info->cond_earliest);
2722 /* We're going to be moving the evaluation of B down from above
2723 COND_EARLIEST to JUMP. Make sure the relevant data is still
2724 intact. */
2725 if (! insn_b
2726 || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
2727 || !NONJUMP_INSN_P (insn_b)
2728 || (set_b = single_set (insn_b)) == NULL_RTX
2729 || ! rtx_interchangeable_p (x, SET_DEST (set_b))
2730 || ! noce_operand_ok (SET_SRC (set_b))
2731 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
2732 || modified_between_p (SET_SRC (set_b), insn_b, jump)
2733 /* Avoid extending the lifetime of hard registers on small
2734 register class machines. */
2735 || (REG_P (SET_SRC (set_b))
2736 && HARD_REGISTER_P (SET_SRC (set_b))
2737 && targetm.small_register_classes_for_mode_p
2738 (GET_MODE (SET_SRC (set_b))))
2739 /* Likewise with X. In particular this can happen when
2740 noce_get_condition looks farther back in the instruction
2741 stream than one might expect. */
2742 || reg_overlap_mentioned_p (x, cond)
2743 || reg_overlap_mentioned_p (x, a)
2744 || modified_between_p (x, insn_b, jump))
2746 insn_b = NULL;
2747 set_b = NULL_RTX;
2751 /* If x has side effects then only the if-then-else form is safe to
2752 convert. But even in that case we would need to restore any notes
2753 (such as REG_INC) at the end. That can be tricky if
2754 noce_emit_move_insn expands to more than one insn, so disable the
2755 optimization entirely for now if there are side effects. */
2756 if (side_effects_p (x))
2757 return FALSE;
2759 b = (set_b ? SET_SRC (set_b) : x);
2761 /* Only operate on register destinations, and even then avoid extending
2762 the lifetime of hard registers on small register class machines. */
2763 orig_x = x;
2764 if (!REG_P (x)
2765 || (HARD_REGISTER_P (x)
2766 && targetm.small_register_classes_for_mode_p (GET_MODE (x))))
2768 if (GET_MODE (x) == BLKmode)
2769 return FALSE;
2771 if (GET_CODE (x) == ZERO_EXTRACT
2772 && (!CONST_INT_P (XEXP (x, 1))
2773 || !CONST_INT_P (XEXP (x, 2))))
2774 return FALSE;
2776 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
2777 ? XEXP (x, 0) : x));
2780 /* Don't operate on sources that may trap or are volatile. */
2781 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
2782 return FALSE;
2784 retry:
2785 /* Set up the info block for our subroutines. */
2786 if_info->insn_a = insn_a;
2787 if_info->insn_b = insn_b;
2788 if_info->x = x;
2789 if_info->a = a;
2790 if_info->b = b;
2792 /* Skip it if the instruction to be moved might clobber CC. */
2793 cc = cc_in_cond (cond);
2794 if (cc
2795 && (set_of (cc, insn_a)
2796 || (insn_b && set_of (cc, insn_b))))
2797 return FALSE;
2799 /* Try optimizations in some approximation of a useful order. */
2800 /* ??? Should first look to see if X is live incoming at all. If it
2801 isn't, we don't need anything but an unconditional set. */
2803 /* Look and see if A and B are really the same. Avoid creating silly
2804 cmove constructs that no one will fix up later. */
2805 if (rtx_interchangeable_p (a, b))
2807 /* If we have an INSN_B, we don't have to create any new rtl. Just
2808 move the instruction that we already have. If we don't have an
2809 INSN_B, that means that A == X, and we've got a noop move. In
2810 that case don't do anything and let the code below delete INSN_A. */
2811 if (insn_b && else_bb)
2813 rtx note;
2815 if (else_bb && insn_b == BB_END (else_bb))
2816 BB_END (else_bb) = PREV_INSN (insn_b);
2817 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
2819 /* If there was a REG_EQUAL note, delete it since it may have been
2820 true due to this insn being after a jump. */
2821 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
2822 remove_note (insn_b, note);
2824 insn_b = NULL;
2826 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
2827 x must be executed twice. */
2828 else if (insn_b && side_effects_p (orig_x))
2829 return FALSE;
2831 x = orig_x;
2832 goto success;
2835 if (!set_b && MEM_P (orig_x))
2837 /* Disallow the "if (...) x = a;" form (implicit "else x = x;")
2838 for optimizations if writing to x may trap or fault,
2839 i.e. it's a memory other than a static var or a stack slot,
2840 is misaligned on strict-alignment machines, or is read-only. If
2841 x is a read-only memory, then the program is valid only if we
2842 avoid the store into it. If there are stores on both the
2843 THEN and ELSE arms, then we can go ahead with the conversion;
2844 either the program is broken, or the condition is always
2845 false such that the other memory is selected. */
2846 if (noce_mem_write_may_trap_or_fault_p (orig_x))
2847 return FALSE;
2849 /* Avoid store speculation: given "if (...) x = a" where x is a
2850 MEM, we only want to do the store if x is always set
2851 somewhere in the function. This avoids cases like
2852 if (pthread_mutex_trylock(mutex))
2853 ++global_variable;
2854 where we only want global_variable to be changed if the mutex
2855 is held. FIXME: This should ideally be expressed directly in
2856 RTL somehow. */
2857 if (!noce_can_store_speculate_p (test_bb, orig_x))
2858 return FALSE;
2861 if (noce_try_move (if_info))
2862 goto success;
2863 if (noce_try_store_flag (if_info))
2864 goto success;
2865 if (noce_try_bitop (if_info))
2866 goto success;
2867 if (noce_try_minmax (if_info))
2868 goto success;
2869 if (noce_try_abs (if_info))
2870 goto success;
2871 if (!targetm.have_conditional_execution ()
2872 && noce_try_store_flag_constants (if_info))
2873 goto success;
2874 if (HAVE_conditional_move
2875 && noce_try_cmove (if_info))
2876 goto success;
2877 if (! targetm.have_conditional_execution ())
2879 if (noce_try_addcc (if_info))
2880 goto success;
2881 if (noce_try_store_flag_mask (if_info))
2882 goto success;
2883 if (HAVE_conditional_move
2884 && noce_try_cmove_arith (if_info))
2885 goto success;
2886 if (noce_try_sign_mask (if_info))
2887 goto success;
2890 if (!else_bb && set_b)
2892 insn_b = NULL;
2893 set_b = NULL_RTX;
2894 b = orig_x;
2895 goto retry;
2898 return FALSE;
2900 success:
2902 /* If we used a temporary, fix it up now. */
2903 if (orig_x != x)
2905 rtx_insn *seq;
2907 start_sequence ();
2908 noce_emit_move_insn (orig_x, x);
2909 seq = get_insns ();
2910 set_used_flags (orig_x);
2911 unshare_all_rtl_in_chain (seq);
2912 end_sequence ();
2914 emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATION (insn_a));
2917 /* The original THEN and ELSE blocks may now be removed. The test block
2918 must now jump to the join block. If the test block and the join block
2919 can be merged, do so. */
2920 if (else_bb)
2922 delete_basic_block (else_bb);
2923 num_true_changes++;
2925 else
2926 remove_edge (find_edge (test_bb, join_bb));
2928 remove_edge (find_edge (then_bb, join_bb));
2929 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
2930 delete_basic_block (then_bb);
2931 num_true_changes++;
2933 if (can_merge_blocks_p (test_bb, join_bb))
2935 merge_blocks (test_bb, join_bb);
2936 num_true_changes++;
2939 num_updated_if_blocks++;
2940 return TRUE;
2943 /* Check whether a block is suitable for conditional move conversion.
2944 Every insn must be a simple set of a register to a constant or a
2945 register. For each assignment, store the value in the pointer map
2946 VALS, keyed by register pointer, then store the register
2947 pointer in REGS. COND is the condition we will test. */
2949 static int
2950 check_cond_move_block (basic_block bb,
2951 hash_map<rtx, rtx> *vals,
2952 vec<rtx> *regs,
2953 rtx cond)
2955 rtx_insn *insn;
2956 rtx cc = cc_in_cond (cond);
2958 /* We can only handle simple jumps at the end of the basic block.
2959 It is almost impossible to update the CFG otherwise. */
2960 insn = BB_END (bb);
2961 if (JUMP_P (insn) && !onlyjump_p (insn))
2962 return FALSE;
2964 FOR_BB_INSNS (bb, insn)
2966 rtx set, dest, src;
2968 if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
2969 continue;
2970 set = single_set (insn);
2971 if (!set)
2972 return FALSE;
2974 dest = SET_DEST (set);
2975 src = SET_SRC (set);
2976 if (!REG_P (dest)
2977 || (HARD_REGISTER_P (dest)
2978 && targetm.small_register_classes_for_mode_p (GET_MODE (dest))))
2979 return FALSE;
2981 if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
2982 return FALSE;
2984 if (side_effects_p (src) || side_effects_p (dest))
2985 return FALSE;
2987 if (may_trap_p (src) || may_trap_p (dest))
2988 return FALSE;
2990 /* Don't try to handle this if the source register was
2991 modified earlier in the block. */
2992 if ((REG_P (src)
2993 && vals->get (src))
2994 || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
2995 && vals->get (SUBREG_REG (src))))
2996 return FALSE;
2998 /* Don't try to handle this if the destination register was
2999 modified earlier in the block. */
3000 if (vals->get (dest))
3001 return FALSE;
3003 /* Don't try to handle this if the condition uses the
3004 destination register. */
3005 if (reg_overlap_mentioned_p (dest, cond))
3006 return FALSE;
3008 /* Don't try to handle this if the source register is modified
3009 later in the block. */
3010 if (!CONSTANT_P (src)
3011 && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
3012 return FALSE;
3014 /* Skip it if the instruction to be moved might clobber CC. */
3015 if (cc && set_of (cc, insn))
3016 return FALSE;
3018 vals->put (dest, src);
3020 regs->safe_push (dest);
3023 return TRUE;
3026 /* Given a basic block BB suitable for conditional move conversion,
3027 a condition COND, and pointer maps THEN_VALS and ELSE_VALS containing
3028 the register values depending on COND, emit the insns in the block as
3029 conditional moves. If ELSE_BLOCK_P is true, THEN_BB was already
3030 processed. The caller has started a sequence for the conversion.
3031 Return true if successful, false if something goes wrong. */
3033 static bool
3034 cond_move_convert_if_block (struct noce_if_info *if_infop,
3035 basic_block bb, rtx cond,
3036 hash_map<rtx, rtx> *then_vals,
3037 hash_map<rtx, rtx> *else_vals,
3038 bool else_block_p)
3040 enum rtx_code code;
3041 rtx_insn *insn;
3042 rtx cond_arg0, cond_arg1;
3044 code = GET_CODE (cond);
3045 cond_arg0 = XEXP (cond, 0);
3046 cond_arg1 = XEXP (cond, 1);
3048 FOR_BB_INSNS (bb, insn)
3050 rtx set, target, dest, t, e;
3052 /* ??? Maybe emit conditional debug insn? */
3053 if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
3054 continue;
3055 set = single_set (insn);
3056 gcc_assert (set && REG_P (SET_DEST (set)));
3058 dest = SET_DEST (set);
3060 rtx *then_slot = then_vals->get (dest);
3061 rtx *else_slot = else_vals->get (dest);
3062 t = then_slot ? *then_slot : NULL_RTX;
3063 e = else_slot ? *else_slot : NULL_RTX;
3065 if (else_block_p)
3067 /* If this register was set in the then block, we already
3068 handled this case there. */
3069 if (t)
3070 continue;
3071 t = dest;
3072 gcc_assert (e);
3074 else
3076 gcc_assert (t);
3077 if (!e)
3078 e = dest;
3081 target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
3082 t, e);
3083 if (!target)
3084 return false;
3086 if (target != dest)
3087 noce_emit_move_insn (dest, target);
3090 return true;
3093 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
3094 it using only conditional moves. Return TRUE if we were successful at
3095 converting the block. */
3097 static int
3098 cond_move_process_if_block (struct noce_if_info *if_info)
3100 basic_block test_bb = if_info->test_bb;
3101 basic_block then_bb = if_info->then_bb;
3102 basic_block else_bb = if_info->else_bb;
3103 basic_block join_bb = if_info->join_bb;
3104 rtx_insn *jump = if_info->jump;
3105 rtx cond = if_info->cond;
3106 rtx_insn *seq, *loc_insn;
3107 rtx reg;
3108 int c;
3109 vec<rtx> then_regs = vNULL;
3110 vec<rtx> else_regs = vNULL;
3111 unsigned int i;
3112 int success_p = FALSE;
3114 /* Build a mapping for each block to the value used for each
3115 register. */
3116 hash_map<rtx, rtx> then_vals;
3117 hash_map<rtx, rtx> else_vals;
3119 /* Make sure the blocks are suitable. */
3120 if (!check_cond_move_block (then_bb, &then_vals, &then_regs, cond)
3121 || (else_bb
3122 && !check_cond_move_block (else_bb, &else_vals, &else_regs, cond)))
3123 goto done;
3125 /* Make sure the blocks can be used together. If the same register
3126 is set in both blocks, and is not set to a constant in both
3127 cases, then both blocks must set it to the same register. We
3128 have already verified that if it is set to a register, that the
3129 source register does not change after the assignment. Also count
3130 the number of registers set in only one of the blocks. */
3131 c = 0;
3132 FOR_EACH_VEC_ELT (then_regs, i, reg)
3134 rtx *then_slot = then_vals.get (reg);
3135 rtx *else_slot = else_vals.get (reg);
3137 gcc_checking_assert (then_slot);
3138 if (!else_slot)
3139 ++c;
3140 else
3142 rtx then_val = *then_slot;
3143 rtx else_val = *else_slot;
3144 if (!CONSTANT_P (then_val) && !CONSTANT_P (else_val)
3145 && !rtx_equal_p (then_val, else_val))
3146 goto done;
3150 /* Finish off c for MAX_CONDITIONAL_EXECUTE. */
3151 FOR_EACH_VEC_ELT (else_regs, i, reg)
3153 gcc_checking_assert (else_vals.get (reg));
3154 if (!then_vals.get (reg))
3155 ++c;
3158 /* Make sure it is reasonable to convert this block. What matters
3159 is the number of assignments currently made in only one of the
3160 branches, since if we convert we are going to always execute
3161 them. */
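/* Illustrative example: for "if (c) { i = 1; j = 2; } else i = 3;",
   J is assigned in only one branch and so counts against
   MAX_CONDITIONAL_EXECUTE, while I is assigned in both and does not.  */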
3162 if (c > MAX_CONDITIONAL_EXECUTE)
3163 goto done;
3165 /* Try to emit the conditional moves. First do the then block,
3166 then do anything left in the else block. */
3167 start_sequence ();
3168 if (!cond_move_convert_if_block (if_info, then_bb, cond,
3169 &then_vals, &else_vals, false)
3170 || (else_bb
3171 && !cond_move_convert_if_block (if_info, else_bb, cond,
3172 &then_vals, &else_vals, true)))
3174 end_sequence ();
3175 goto done;
3177 seq = end_ifcvt_sequence (if_info);
3178 if (!seq)
3179 goto done;
3181 loc_insn = first_active_insn (then_bb);
3182 if (!loc_insn)
3184 loc_insn = first_active_insn (else_bb);
3185 gcc_assert (loc_insn);
3187 emit_insn_before_setloc (seq, jump, INSN_LOCATION (loc_insn));
3189 if (else_bb)
3191 delete_basic_block (else_bb);
3192 num_true_changes++;
3194 else
3195 remove_edge (find_edge (test_bb, join_bb));
3197 remove_edge (find_edge (then_bb, join_bb));
3198 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
3199 delete_basic_block (then_bb);
3200 num_true_changes++;
3202 if (can_merge_blocks_p (test_bb, join_bb))
3204 merge_blocks (test_bb, join_bb);
3205 num_true_changes++;
3208 num_updated_if_blocks++;
3210 success_p = TRUE;
3212 done:
3213 then_regs.release ();
3214 else_regs.release ();
3215 return success_p;
3219 /* Determine if a given basic block heads a simple IF-THEN-JOIN or an
3220 IF-THEN-ELSE-JOIN block.
3222 If so, we'll try to convert the insns to not require the branch,
3223 using only transformations that do not require conditional execution.
3225 Return TRUE if we were successful at converting the block. */
3227 static int
3228 noce_find_if_block (basic_block test_bb, edge then_edge, edge else_edge,
3229 int pass)
3231 basic_block then_bb, else_bb, join_bb;
3232 bool then_else_reversed = false;
3233 rtx_insn *jump;
3234 rtx cond;
3235 rtx_insn *cond_earliest;
3236 struct noce_if_info if_info;
3238 /* We should only ever get here before reload. */
3239 gcc_assert (!reload_completed);
3241 /* Recognize an IF-THEN-ELSE-JOIN block. */
3242 if (single_pred_p (then_edge->dest)
3243 && single_succ_p (then_edge->dest)
3244 && single_pred_p (else_edge->dest)
3245 && single_succ_p (else_edge->dest)
3246 && single_succ (then_edge->dest) == single_succ (else_edge->dest))
3248 then_bb = then_edge->dest;
3249 else_bb = else_edge->dest;
3250 join_bb = single_succ (then_bb);
3252 /* Recognize an IF-THEN-JOIN block. */
3253 else if (single_pred_p (then_edge->dest)
3254 && single_succ_p (then_edge->dest)
3255 && single_succ (then_edge->dest) == else_edge->dest)
3257 then_bb = then_edge->dest;
3258 else_bb = NULL_BLOCK;
3259 join_bb = else_edge->dest;
3261 /* Recognize an IF-ELSE-JOIN block. We can have those because the order
3262 of basic blocks in cfglayout mode does not matter, so the fallthrough
3263 edge can go to any basic block (and not just to bb->next_bb, like in
3264 cfgrtl mode). */
3265 else if (single_pred_p (else_edge->dest)
3266 && single_succ_p (else_edge->dest)
3267 && single_succ (else_edge->dest) == then_edge->dest)
3269 /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
3270 To make this work, we have to invert the THEN and ELSE blocks
3271 and reverse the jump condition. */
3272 then_bb = else_edge->dest;
3273 else_bb = NULL_BLOCK;
3274 join_bb = single_succ (then_bb);
3275 then_else_reversed = true;
3277 else
3278 /* Not a form we can handle. */
3279 return FALSE;
3281 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
3282 if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
3283 return FALSE;
3284 if (else_bb
3285 && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
3286 return FALSE;
3288 num_possible_if_blocks++;
3290 if (dump_file)
3292 fprintf (dump_file,
3293 "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
3294 (else_bb) ? "-ELSE" : "",
3295 pass, test_bb->index, then_bb->index);
3297 if (else_bb)
3298 fprintf (dump_file, ", else %d", else_bb->index);
3300 fprintf (dump_file, ", join %d\n", join_bb->index);
3303 /* If the conditional jump is more than just a conditional
3304 jump, then we cannot do if-conversion on this block. */
3305 jump = BB_END (test_bb);
3306 if (! onlyjump_p (jump))
3307 return FALSE;
3309 /* If this is not a standard conditional jump, we can't parse it. */
3310 cond = noce_get_condition (jump, &cond_earliest, then_else_reversed);
3311 if (!cond)
3312 return FALSE;
3314 /* We must be comparing objects whose modes imply the size. */
3315 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
3316 return FALSE;
3318 /* Initialize an IF_INFO struct to pass around. */
3319 memset (&if_info, 0, sizeof if_info);
3320 if_info.test_bb = test_bb;
3321 if_info.then_bb = then_bb;
3322 if_info.else_bb = else_bb;
3323 if_info.join_bb = join_bb;
3324 if_info.cond = cond;
3325 if_info.cond_earliest = cond_earliest;
3326 if_info.jump = jump;
3327 if_info.then_else_reversed = then_else_reversed;
3328 if_info.branch_cost = BRANCH_COST (optimize_bb_for_speed_p (test_bb),
3329 predictable_edge_p (then_edge));
3331 /* Do the real work. */
3333 if (noce_process_if_block (&if_info))
3334 return TRUE;
3336 if (HAVE_conditional_move
3337 && cond_move_process_if_block (&if_info))
3338 return TRUE;
3340 return FALSE;
3344 /* Merge the blocks and mark for local life update. */
3346 static void
3347 merge_if_block (struct ce_if_block * ce_info)
3349 basic_block test_bb = ce_info->test_bb; /* last test block */
3350 basic_block then_bb = ce_info->then_bb; /* THEN */
3351 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
3352 basic_block join_bb = ce_info->join_bb; /* join block */
3353 basic_block combo_bb;
3355 /* All block merging is done into the lower block numbers. */
3357 combo_bb = test_bb;
3358 df_set_bb_dirty (test_bb);
3360 /* Merge any basic blocks to handle && and || subtests. Each of
3361 the blocks are on the fallthru path from the predecessor block. */
3362 if (ce_info->num_multiple_test_blocks > 0)
3364 basic_block bb = test_bb;
3365 basic_block last_test_bb = ce_info->last_test_bb;
3366 basic_block fallthru = block_fallthru (bb);
3370 bb = fallthru;
3371 fallthru = block_fallthru (bb);
3372 merge_blocks (combo_bb, bb);
3373 num_true_changes++;
3375 while (bb != last_test_bb);
3378 /* Merge TEST block into THEN block. Normally the THEN block won't have a
3379 label, but it might if there were || tests. That label's count should be
3380 zero, and it normally should be removed. */
3382 if (then_bb)
3384 /* If THEN_BB has no successors, then there's a BARRIER after it.
3385 If COMBO_BB has more than one successor (THEN_BB), then that BARRIER
3386 is no longer needed, and in fact it is incorrect to leave it in
3387 the insn stream. */
3388 if (EDGE_COUNT (then_bb->succs) == 0
3389 && EDGE_COUNT (combo_bb->succs) > 1)
3391 rtx_insn *end = NEXT_INSN (BB_END (then_bb));
3392 while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
3393 end = NEXT_INSN (end);
3395 if (end && BARRIER_P (end))
3396 delete_insn (end);
3398 merge_blocks (combo_bb, then_bb);
3399 num_true_changes++;
3402 /* The ELSE block, if it existed, had a label. That label count
3403 will almost always be zero, but odd things can happen when labels
3404 get their addresses taken. */
3405 if (else_bb)
3407 /* If ELSE_BB has no successors, then there's a BARRIER after it.
3408 If COMBO_BB has more than one successor (ELSE_BB), then that BARRIER
3409 is no longer needed, and in fact it is incorrect to leave it in
3410 the insn stream. */
3411 if (EDGE_COUNT (else_bb->succs) == 0
3412 && EDGE_COUNT (combo_bb->succs) > 1)
3414 rtx_insn *end = NEXT_INSN (BB_END (else_bb));
3415 while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
3416 end = NEXT_INSN (end);
3418 if (end && BARRIER_P (end))
3419 delete_insn (end);
3421 merge_blocks (combo_bb, else_bb);
3422 num_true_changes++;
3425 /* If there was no join block reported, that means it was not adjacent
3426 to the others, and so we cannot merge them. */
3428 if (! join_bb)
3430 rtx_insn *last = BB_END (combo_bb);
3432 /* The outgoing edge for the current COMBO block should already
3433 be correct. Verify this. */
3434 if (EDGE_COUNT (combo_bb->succs) == 0)
3435 gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
3436 || (NONJUMP_INSN_P (last)
3437 && GET_CODE (PATTERN (last)) == TRAP_IF
3438 && (TRAP_CONDITION (PATTERN (last))
3439 == const_true_rtx)));
3441 else
3442 /* There should still be something at the end of the THEN or ELSE
3443 blocks taking us to our final destination. */
3444 gcc_assert (JUMP_P (last)
3445 || (EDGE_SUCC (combo_bb, 0)->dest
3446 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3447 && CALL_P (last)
3448 && SIBLING_CALL_P (last))
3449 || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
3450 && can_throw_internal (last)));
3453 /* The JOIN block may have had quite a number of other predecessors too.
3454 Since we've already merged the TEST, THEN and ELSE blocks, we should
3455 have only one remaining edge from our if-then-else diamond. If there
3456 is more than one remaining edge, it must come from elsewhere. There
3457 may be zero incoming edges if the THEN block didn't actually join
3458 back up (as with a call to a non-return function). */
3459 else if (EDGE_COUNT (join_bb->preds) < 2
3460 && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3462 /* We can merge the JOIN cleanly and update the dataflow, then try
3463 again on this pass. */
3464 merge_blocks (combo_bb, join_bb);
3465 num_true_changes++;
3467 else
3469 /* We cannot merge the JOIN. */
3471 /* The outgoing edge for the current COMBO block should already
3472 be correct. Verify this. */
3473 gcc_assert (single_succ_p (combo_bb)
3474 && single_succ (combo_bb) == join_bb);
3476 /* Remove the jump and cruft from the end of the COMBO block. */
3477 if (join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3478 tidy_fallthru_edge (single_succ_edge (combo_bb));
3481 num_updated_if_blocks++;
3484 /* Find a block ending in a simple IF condition and try to transform it
3485 in some way. When converting a multi-block condition, put the new code
3486 in the first such block and delete the rest. Return a pointer to this
3487 first block if some transformation was done. Return NULL otherwise. */
3489 static basic_block
3490 find_if_header (basic_block test_bb, int pass)
3492 ce_if_block ce_info;
3493 edge then_edge;
3494 edge else_edge;
3496 /* The kind of block we're looking for has exactly two successors. */
3497 if (EDGE_COUNT (test_bb->succs) != 2)
3498 return NULL;
3500 then_edge = EDGE_SUCC (test_bb, 0);
3501 else_edge = EDGE_SUCC (test_bb, 1);
3503 if (df_get_bb_dirty (then_edge->dest))
3504 return NULL;
3505 if (df_get_bb_dirty (else_edge->dest))
3506 return NULL;
3508 /* Neither edge should be abnormal. */
3509 if ((then_edge->flags & EDGE_COMPLEX)
3510 || (else_edge->flags & EDGE_COMPLEX))
3511 return NULL;
3513 /* Nor should either edge exit a loop. */
3514 if ((then_edge->flags & EDGE_LOOP_EXIT)
3515 || (else_edge->flags & EDGE_LOOP_EXIT))
3516 return NULL;
3518 /* The THEN edge is canonically the one that falls through. */
3519 if (then_edge->flags & EDGE_FALLTHRU)
3521 else if (else_edge->flags & EDGE_FALLTHRU)
3522 std::swap (then_edge, else_edge);
3523 else
3524 /* Otherwise this must be a multiway branch of some sort. */
3525 return NULL;
3527 memset (&ce_info, 0, sizeof (ce_info));
3528 ce_info.test_bb = test_bb;
3529 ce_info.then_bb = then_edge->dest;
3530 ce_info.else_bb = else_edge->dest;
3531 ce_info.pass = pass;
3533 #ifdef IFCVT_MACHDEP_INIT
3534 IFCVT_MACHDEP_INIT (&ce_info);
3535 #endif
3537 if (!reload_completed
3538 && noce_find_if_block (test_bb, then_edge, else_edge, pass))
3539 goto success;
3541 if (reload_completed
3542 && targetm.have_conditional_execution ()
3543 && cond_exec_find_if_block (&ce_info))
3544 goto success;
3546 if (targetm.have_trap ()
3547 && optab_handler (ctrap_optab, word_mode) != CODE_FOR_nothing
3548 && find_cond_trap (test_bb, then_edge, else_edge))
3549 goto success;
3551 if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
3552 && (reload_completed || !targetm.have_conditional_execution ()))
3554 if (find_if_case_1 (test_bb, then_edge, else_edge))
3555 goto success;
3556 if (find_if_case_2 (test_bb, then_edge, else_edge))
3557 goto success;
3560 return NULL;
3562 success:
3563 if (dump_file)
3564 fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
3565 /* Set this so we continue looking. */
3566 cond_exec_changed_p = TRUE;
3567 return ce_info.test_bb;
3570 /* Determine whether a block has two edges, one of which falls through to the next
3571 block, and the other jumps to a specific block, so that we can tell whether the
3572 block is part of an && test or an || test. Return -1 if not, otherwise the
3573 number of non-note, non-jump, non-USE/CLOBBER insns in the block. */
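/* Illustrative example: for "if (a && b) ...", the block testing A
   falls through to the block testing B and jumps to the ELSE target
   when A is false, which is exactly the shape recognized here.  */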
3575 static int
3576 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
3578 edge cur_edge;
3579 int fallthru_p = FALSE;
3580 int jump_p = FALSE;
3581 rtx_insn *insn;
3582 rtx_insn *end;
3583 int n_insns = 0;
3584 edge_iterator ei;
3586 if (!cur_bb || !target_bb)
3587 return -1;
3589 /* If no edges, obviously it doesn't jump or fallthru. */
3590 if (EDGE_COUNT (cur_bb->succs) == 0)
3591 return FALSE;
3593 FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
3595 if (cur_edge->flags & EDGE_COMPLEX)
3596 /* Anything complex isn't what we want. */
3597 return -1;
3599 else if (cur_edge->flags & EDGE_FALLTHRU)
3600 fallthru_p = TRUE;
3602 else if (cur_edge->dest == target_bb)
3603 jump_p = TRUE;
3605 else
3606 return -1;
3609 if ((jump_p & fallthru_p) == 0)
3610 return -1;
3612 /* Don't allow calls in the block, since this is used to group && and ||
3613 together for conditional execution support. ??? We should support
3614 conditional execution across calls for IA-64 some day, but for now
3615 this restriction keeps the code simpler. */
3616 end = BB_END (cur_bb);
3617 insn = BB_HEAD (cur_bb);
3619 while (insn != NULL_RTX)
3621 if (CALL_P (insn))
3622 return -1;
3624 if (INSN_P (insn)
3625 && !JUMP_P (insn)
3626 && !DEBUG_INSN_P (insn)
3627 && GET_CODE (PATTERN (insn)) != USE
3628 && GET_CODE (PATTERN (insn)) != CLOBBER)
3629 n_insns++;
3631 if (insn == end)
3632 break;
3634 insn = NEXT_INSN (insn);
3637 return n_insns;
3640 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
3641 block. If so, we'll try to convert the insns to not require the branch.
3642 Return TRUE if we were successful at converting the block. */
3644 static int
3645 cond_exec_find_if_block (struct ce_if_block * ce_info)
3647 basic_block test_bb = ce_info->test_bb;
3648 basic_block then_bb = ce_info->then_bb;
3649 basic_block else_bb = ce_info->else_bb;
3650 basic_block join_bb = NULL_BLOCK;
3651 edge cur_edge;
3652 basic_block next;
3653 edge_iterator ei;
3655 ce_info->last_test_bb = test_bb;
3657 /* We should only ever get here after reload,
3658 and only if the target has conditional execution. */
3659 gcc_assert (reload_completed && targetm.have_conditional_execution ());
3661 /* Discover whether any fall-through predecessors of the current test basic block
3662 were && tests (which jump to the else block) or || tests (which jump to
3663 the then block). */
3664 if (single_pred_p (test_bb)
3665 && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
3667 basic_block bb = single_pred (test_bb);
3668 basic_block target_bb;
3669 int max_insns = MAX_CONDITIONAL_EXECUTE;
3670 int n_insns;
3672 /* Determine if the preceding block is an && or || block. */
3673 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
3675 ce_info->and_and_p = TRUE;
3676 target_bb = else_bb;
3678 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
3680 ce_info->and_and_p = FALSE;
3681 target_bb = then_bb;
3683 else
3684 target_bb = NULL_BLOCK;
3686 if (target_bb && n_insns <= max_insns)
3688 int total_insns = 0;
3689 int blocks = 0;
3691 ce_info->last_test_bb = test_bb;
3693 /* Found at least one && or || block, look for more. */
3696 ce_info->test_bb = test_bb = bb;
3697 total_insns += n_insns;
3698 blocks++;
3700 if (!single_pred_p (bb))
3701 break;
3703 bb = single_pred (bb);
3704 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
3706 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
3708 ce_info->num_multiple_test_blocks = blocks;
3709 ce_info->num_multiple_test_insns = total_insns;
3711 if (ce_info->and_and_p)
3712 ce_info->num_and_and_blocks = blocks;
3713 else
3714 ce_info->num_or_or_blocks = blocks;
3718 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
3719 other than any || blocks which jump to the THEN block. */
3720 if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
3721 return FALSE;
3723 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
3724 FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
3726 if (cur_edge->flags & EDGE_COMPLEX)
3727 return FALSE;
3730 FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
3732 if (cur_edge->flags & EDGE_COMPLEX)
3733 return FALSE;
3736 /* The THEN block of an IF-THEN combo must have zero or one successors. */
3737 if (EDGE_COUNT (then_bb->succs) > 0
3738 && (!single_succ_p (then_bb)
3739 || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
3740 || (epilogue_completed
3741 && tablejump_p (BB_END (then_bb), NULL, NULL))))
3742 return FALSE;
3744 /* If the THEN block has no successors, conditional execution can still
3745 make a conditional call. Don't do this unless the ELSE block has
3746 only one incoming edge -- the CFG manipulation is too ugly otherwise.
3747 Check for the last insn of the THEN block being an indirect jump, which
3748 is listed as not having any successors, but confuses the rest of the CE
3749 code processing. ??? We should fix this in the future. */
3750 if (EDGE_COUNT (then_bb->succs) == 0)
3752 if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3754 rtx_insn *last_insn = BB_END (then_bb);
3756 while (last_insn
3757 && NOTE_P (last_insn)
3758 && last_insn != BB_HEAD (then_bb))
3759 last_insn = PREV_INSN (last_insn);
3761 if (last_insn
3762 && JUMP_P (last_insn)
3763 && ! simplejump_p (last_insn))
3764 return FALSE;
3766 join_bb = else_bb;
3767 else_bb = NULL_BLOCK;
3769 else
3770 return FALSE;
3773 /* If the THEN block's successor is the other edge out of the TEST block,
3774 then we have an IF-THEN combo without an ELSE. */
3775 else if (single_succ (then_bb) == else_bb)
3777 join_bb = else_bb;
3778 else_bb = NULL_BLOCK;
3781 /* If the THEN and ELSE blocks meet in a subsequent block, and the ELSE
3782 has exactly one predecessor and one successor, and the outgoing edge
3783 is not complex, then we have an IF-THEN-ELSE combo. */
3784 else if (single_succ_p (else_bb)
3785 && single_succ (then_bb) == single_succ (else_bb)
3786 && single_pred_p (else_bb)
3787 && !(single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
3788 && !(epilogue_completed
3789 && tablejump_p (BB_END (else_bb), NULL, NULL)))
3790 join_bb = single_succ (else_bb);
3792 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
3793 else
3794 return FALSE;
3796 num_possible_if_blocks++;
3798 if (dump_file)
3800 fprintf (dump_file,
3801 "\nIF-THEN%s block found, pass %d, start block %d "
3802 "[insn %d], then %d [%d]",
3803 (else_bb) ? "-ELSE" : "",
3804 ce_info->pass,
3805 test_bb->index,
3806 BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
3807 then_bb->index,
3808 BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
3810 if (else_bb)
3811 fprintf (dump_file, ", else %d [%d]",
3812 else_bb->index,
3813 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
3815 fprintf (dump_file, ", join %d [%d]",
3816 join_bb->index,
3817 BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
3819 if (ce_info->num_multiple_test_blocks > 0)
3820 fprintf (dump_file, ", %d %s block%s last test %d [%d]",
3821 ce_info->num_multiple_test_blocks,
3822 (ce_info->and_and_p) ? "&&" : "||",
3823 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
3824 ce_info->last_test_bb->index,
3825 ((BB_HEAD (ce_info->last_test_bb))
3826 ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
3827 : -1));
3829 fputc ('\n', dump_file);
3832 /* Make sure IF, THEN, and ELSE blocks are adjacent. Actually, we get the
3833 first condition for free, since we've already asserted that there's a
3834 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
3835 we checked the FALLTHRU flag, those are already adjacent to the last IF
3836 block. */
3837 /* ??? As an enhancement, move the ELSE block. Have to deal with
3838 BLOCK notes, if by no other means than backing out the merge if they
3839 exist. Sticky enough I don't want to think about it now. */
3840 next = then_bb;
3841 if (else_bb && (next = next->next_bb) != else_bb)
3842 return FALSE;
3843 if ((next = next->next_bb) != join_bb
3844 && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3846 if (else_bb)
3847 join_bb = NULL;
3848 else
3849 return FALSE;
3852 /* Do the real work. */
3854 ce_info->else_bb = else_bb;
3855 ce_info->join_bb = join_bb;
3857 /* If we have && and || tests, first try to combine them into the
3858 conditional code; if that fails, go back and handle the block without
3859 the && and ||, which at present handles the && case if there was
3860 no ELSE block. */
3861 if (cond_exec_process_if_block (ce_info, TRUE))
3862 return TRUE;
3864 if (ce_info->num_multiple_test_blocks)
3866 cancel_changes (0);
3868 if (cond_exec_process_if_block (ce_info, FALSE))
3869 return TRUE;
3872 return FALSE;
3875 /* Convert a branch over a trap, or a branch
3876 to a trap, into a conditional trap. */
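/* Illustrative example: "if (c) goto over; trap; over: ..." becomes a
   single conditional trap that fires when !c, and the trap block is
   deleted where possible.  */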
3878 static int
3879 find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
3881 basic_block then_bb = then_edge->dest;
3882 basic_block else_bb = else_edge->dest;
3883 basic_block other_bb, trap_bb;
3884 rtx_insn *trap, *jump;
3885 rtx cond;
3886 rtx_insn *cond_earliest;
3887 enum rtx_code code;
3889 /* Locate the block with the trap instruction. */
3890 /* ??? While we look for no successors, we really ought to allow
3891 EH successors. Need to fix merge_if_block for that to work. */
3892 if ((trap = block_has_only_trap (then_bb)) != NULL)
3893 trap_bb = then_bb, other_bb = else_bb;
3894 else if ((trap = block_has_only_trap (else_bb)) != NULL)
3895 trap_bb = else_bb, other_bb = then_bb;
3896 else
3897 return FALSE;
3899 if (dump_file)
3901 fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
3902 test_bb->index, trap_bb->index);
3905 /* If this is not a standard conditional jump, we can't parse it. */
3906 jump = BB_END (test_bb);
3907 cond = noce_get_condition (jump, &cond_earliest, false);
3908 if (! cond)
3909 return FALSE;
3911 /* If the conditional jump is more than just a conditional jump, then
3912 we cannot do if-conversion on this block. */
3913 if (! onlyjump_p (jump))
3914 return FALSE;
3916 /* We must be comparing objects whose modes imply the size. */
3917 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
3918 return FALSE;
3920 /* Reverse the comparison code, if necessary. */
3921 code = GET_CODE (cond);
3922 if (then_bb == trap_bb)
3924 code = reversed_comparison_code (cond, jump);
3925 if (code == UNKNOWN)
3926 return FALSE;
3929 /* Attempt to generate the conditional trap. */
3930 rtx_insn *seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
3931 copy_rtx (XEXP (cond, 1)),
3932 TRAP_CODE (PATTERN (trap)));
3933 if (seq == NULL)
3934 return FALSE;
3936 /* Emit the new insns before cond_earliest. */
3937 emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATION (trap));
3939 /* Delete the trap block if possible. */
3940 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
3941 df_set_bb_dirty (test_bb);
3942 df_set_bb_dirty (then_bb);
3943 df_set_bb_dirty (else_bb);
3945 if (EDGE_COUNT (trap_bb->preds) == 0)
3947 delete_basic_block (trap_bb);
3948 num_true_changes++;
3951 /* Wire together the blocks again. */
3952 if (current_ir_type () == IR_RTL_CFGLAYOUT)
3953 single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
3954 else if (trap_bb == then_bb)
3956 rtx lab = JUMP_LABEL (jump);
3957 rtx_insn *seq = targetm.gen_jump (lab);
3958 rtx_jump_insn *newjump = emit_jump_insn_after (seq, jump);
3959 LABEL_NUSES (lab) += 1;
3960 JUMP_LABEL (newjump) = lab;
3961 emit_barrier_after (newjump);
3963 delete_insn (jump);
3965 if (can_merge_blocks_p (test_bb, other_bb))
3967 merge_blocks (test_bb, other_bb);
3968 num_true_changes++;
3971 num_updated_if_blocks++;
3972 return TRUE;
3975 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
3976 return it. */
3978 static rtx_insn *
3979 block_has_only_trap (basic_block bb)
3981 rtx_insn *trap;
3983 /* The block must not be the exit block. */
3984 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
3985 return NULL;
3987 /* The block must have no successors. */
3988 if (EDGE_COUNT (bb->succs) > 0)
3989 return NULL;
3991 /* The only instruction in the block must be the trap. */
3992 trap = first_active_insn (bb);
3993 if (! (trap == BB_END (bb)
3994 && GET_CODE (PATTERN (trap)) == TRAP_IF
3995 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
3996 return NULL;
3998 return trap;
/* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
   transformable, but not necessarily the other.  There need be no
   JOIN block.

   Return TRUE if we were successful at converting the block.

   Cases we'd like to look at:

   (1)
	if (test) goto over; // x not live
	x = a;
	goto label;
      over:

   becomes

	x = a;
	if (! test) goto label;

   (2)
	if (test) goto E; // x not live
	x = big();
	goto L;
      E:
	x = b;
	goto M;

   becomes

	x = b;
	if (test) goto M;
	x = big();
	goto L;

   (3) // This one's really only interesting for targets that can do
       // multiway branching, e.g. IA-64 BBB bundles.  For other targets
       // it results in multiple branches on a cache line, which often
       // does not sit well with predictors.

	if (test1) goto E; // predicted not taken
	x = a;
	if (test2) goto F;
	...
      E:
	x = b;
	goto M;

   becomes

	x = a;
	if (test1) goto E;
	if (test2) goto F;

   Notes:

   (A) Don't do (2) if the branch is predicted against the block we're
   eliminating.  Do it anyway if we can eliminate a branch; this requires
   that the sole successor of the eliminated block postdominate the other
   side of the if.

   (B) With CE, on (3) we can steal from both sides of the if, creating

	if (test1) x = a;
	if (!test1) x = b;
	if (test1) goto J;
	if (test2) goto F;
	...
      J:
	...

   Again, this is most useful if J postdominates.

   (C) CE substitutes for helpful life information.

   (D) These heuristics need a lot of work.  */
/* Tests for case 1 above.  */
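/* For illustration, the CFG shape matched here is roughly

	 test_bb -- (test) --> else_bb (the "over" label)
	    |
	    | (fallthru)
	    v
	 then_bb -- (jump) --> label

   On success, the body of THEN_BB is moved into TEST_BB, the branch is
   inverted to target LABEL directly, and THEN_BB is deleted.  (A sketch
   of the intent, not an exact dump.)  */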
static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block new_bb;
  int then_bb_index, then_prob;
  rtx else_target = NULL_RTX;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && JUMP_P (BB_END (then_bb))
       && CROSSING_JUMP_P (BB_END (then_bb)))
      || (BB_END (test_bb)
	  && JUMP_P (BB_END (test_bb))
	  && CROSSING_JUMP_P (BB_END (test_bb)))
      || (BB_END (else_bb)
	  && JUMP_P (BB_END (else_bb))
	  && CROSSING_JUMP_P (BB_END (else_bb))))
    return FALSE;

  /* THEN has one successor.  */
  if (!single_succ_p (then_bb))
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (!single_pred_p (then_bb))
    return FALSE;

  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-1 found, start %d, then %d\n",
	     test_bb->index, then_bb->index);

  if (then_edge->probability)
    then_prob = REG_BR_PROB_BASE - then_edge->probability;
  else
    then_prob = REG_BR_PROB_BASE / 2;

  /* We're speculating from the THEN path, so we want to make sure the
     cost of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (then_bb, then_prob,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
				    predictable_edge_p (then_edge)))))
    return FALSE;

  if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      rtx_insn *jump = BB_END (else_edge->src);
      gcc_assert (JUMP_P (jump));
      else_target = JUMP_LABEL (jump);
    }

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
			    single_succ_edge (then_bb), 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  /* We can avoid creating a new basic block if then_bb is immediately
     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
     through to else_bb.  */

  if (then_bb->next_bb == else_bb
      && then_bb->prev_bb == test_bb
      && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
      new_bb = 0;
    }
  else if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    new_bb = force_nonfallthru_and_redirect (FALLTHRU_EDGE (test_bb),
					     else_bb, else_target);
  else
    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
					     else_bb);

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (else_bb);

  then_bb_index = then_bb->index;
  delete_basic_block (then_bb);

  /* Make rest of code believe that the newly created block is the THEN_BB
     block we removed.  */
  if (new_bb)
    {
      df_bb_replace (then_bb_index, new_bb);
      /* This should have been done above via force_nonfallthru_and_redirect
	 (possibly called from redirect_edge_and_branch_force).  */
      gcc_checking_assert (BB_PARTITION (new_bb) == BB_PARTITION (test_bb));
    }

  num_true_changes++;
  num_updated_if_blocks++;

  return TRUE;
}
/* Test for case 2 above.  */

static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ;
  int then_prob, else_prob;

  /* We do not want to speculate (empty) loop latches.  */
  if (current_loops
      && else_bb->loop_father->latch == else_bb)
    return FALSE;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && JUMP_P (BB_END (then_bb))
       && CROSSING_JUMP_P (BB_END (then_bb)))
      || (BB_END (test_bb)
	  && JUMP_P (BB_END (test_bb))
	  && CROSSING_JUMP_P (BB_END (test_bb)))
      || (BB_END (else_bb)
	  && JUMP_P (BB_END (else_bb))
	  && CROSSING_JUMP_P (BB_END (else_bb))))
    return FALSE;

  /* ELSE has one successor.  */
  if (!single_succ_p (else_bb))
    return FALSE;
  else
    else_succ = single_succ_edge (else_bb);

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (!single_pred_p (else_bb))
    return FALSE;

  /* THEN is not EXIT.  */
  if (then_bb->index < NUM_FIXED_BLOCKS)
    return FALSE;

  if (else_edge->probability)
    {
      else_prob = else_edge->probability;
      then_prob = REG_BR_PROB_BASE - else_prob;
    }
  else
    {
      else_prob = REG_BR_PROB_BASE / 2;
      then_prob = REG_BR_PROB_BASE / 2;
    }

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
  if (else_prob > then_prob)
    ;
  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
			      else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-2 found, start %d, else %d\n",
	     test_bb->index, else_bb->index);

  /* We're speculating from the ELSE path, so we want to make sure the
     cost of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (else_bb, else_prob,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
				    predictable_edge_p (else_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  DEST_EDGE is an
   edge representing a jump to the join block; after the conversion,
   TEST_BB should be branching to its destination.
   REVERSEP is true if the sense of the branch should be reversed.  */
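/* As an illustrative sketch of the two strategies tried below (not an
   exact dump): for case 1 above, MERGE_BB contains "x = a" and REVERSEP
   is nonzero.  Starting from

	test_bb:   if (test) goto over;
	merge_bb:  x = a; goto label;
      over:

   the conditional-execution path predicates every insn in MERGE_BB on
   the reversed condition, while the non-CE path proves X dead in
   OTHER_BB and simply hoists the insns above the branch, yielding

	test_bb:   x = a; if (! test) goto label;
      over:

   (A sketch only; the real code operates on RTL insns and edges.)  */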
static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
		    basic_block other_bb, edge dest_edge, int reversep)
{
  basic_block new_dest = dest_edge->dest;
  rtx_insn *head, *end, *jump;
  rtx_insn *earliest = NULL;
  rtx old_dest;
  bitmap merge_set = NULL;
  /* Number of pending changes.  */
  int n_validated_changes = 0;
  rtx new_dest_label = NULL_RTX;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  while (DEBUG_INSN_P (end) && end != head)
    end = PREV_INSN (end);

  /* If merge_bb ends with a tablejump, predicating/moving insns
     into test_bb and then deleting merge_bb will result in the jumptable
     that follows merge_bb being removed along with merge_bb and then we
     get an unresolved reference to the jumptable.  */
  if (tablejump_p (end, NULL, NULL))
    return FALSE;

  if (LABEL_P (head))
    head = NEXT_INSN (head);
  while (DEBUG_INSN_P (head) && head != end)
    head = NEXT_INSN (head);
  if (NOTE_P (head))
    {
      if (head == end)
	{
	  head = end = NULL;
	  goto no_body;
	}
      head = NEXT_INSN (head);
      while (DEBUG_INSN_P (head) && head != end)
	head = NEXT_INSN (head);
    }

  if (JUMP_P (end))
    {
      if (!onlyjump_p (end))
	return FALSE;
      if (head == end)
	{
	  head = end = NULL;
	  goto no_body;
	}
      end = PREV_INSN (end);
      while (DEBUG_INSN_P (end) && end != head)
	end = PREV_INSN (end);
    }

  /* Don't move frame-related insns across the conditional branch.  This
     can lead to one of the paths of the branch having wrong unwind info.  */
  if (epilogue_completed)
    {
      rtx_insn *insn = head;
      while (1)
	{
	  if (INSN_P (insn) && RTX_FRAME_RELATED_P (insn))
	    return FALSE;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (targetm.have_conditional_execution ())
    {
      /* In the conditional execution case, we have things easy.  We know
	 the condition is reversible.  We don't have to check life info
	 because we're going to conditionally execute the code anyway.
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

      rtx cond;

      cond = cond_exec_get_condition (jump);
      if (! cond)
	return FALSE;

      rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      int prob_val = (note ? XINT (note, 0) : -1);

      if (reversep)
	{
	  enum rtx_code rev = reversed_comparison_code (cond, jump);
	  if (rev == UNKNOWN)
	    return FALSE;
	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
				 XEXP (cond, 1));
	  if (prob_val >= 0)
	    prob_val = REG_BR_PROB_BASE - prob_val;
	}

      if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
	  && verify_changes (0))
	n_validated_changes = num_validated_changes ();
      else
	cancel_changes (0);

      earliest = jump;
    }
#endif

  /* If we allocated new pseudos (e.g. in the conditional move
     expander called from noce_emit_cmove), we must resize the
     array first.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Try the NCE path if the CE path did not result in any changes.  */
  if (n_validated_changes == 0)
    {
      rtx cond;
      rtx_insn *insn;
      regset live;
      bool success;

      /* In the non-conditional execution case, we have to verify that there
	 are no trapping operations, no calls, no references to memory, and
	 that any registers modified are dead at the branch site.  */

      if (!any_condjump_p (jump))
	return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest, false);
      if (!cond)
	return FALSE;

      live = BITMAP_ALLOC (&reg_obstack);
      simulate_backwards_to_point (merge_bb, live, end);
      success = can_move_insns_across (head, end, earliest, jump,
				       merge_bb, live,
				       df_get_live_in (other_bb), NULL);
      BITMAP_FREE (live);
      if (!success)
	return FALSE;

      /* Collect the set of registers set in MERGE_BB.  */
      merge_set = BITMAP_ALLOC (&reg_obstack);

      FOR_BB_INSNS (merge_bb, insn)
	if (NONDEBUG_INSN_P (insn))
	  df_simulate_find_defs (insn, merge_set);

      /* If shrink-wrapping, disable this optimization when test_bb is
	 the first basic block and merge_bb exits.  The idea is to not
	 move code setting up a return register as that may clobber a
	 register used to pass function parameters, which then must be
	 saved in caller-saved regs.  A caller-saved reg requires the
	 prologue, killing a shrink-wrap opportunity.  */
      if ((SHRINK_WRAPPING_ENABLED && !epilogue_completed)
	  && ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == test_bb
	  && single_succ_p (new_dest)
	  && single_succ (new_dest) == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && bitmap_intersect_p (df_get_live_in (new_dest), merge_set))
	{
	  regset return_regs;
	  unsigned int i;

	  return_regs = BITMAP_ALLOC (&reg_obstack);

	  /* Start off with the intersection of regs used to pass
	     params and regs used to return values.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (FUNCTION_ARG_REGNO_P (i)
		&& targetm.calls.function_value_regno_p (i))
	      bitmap_set_bit (return_regs, INCOMING_REGNO (i));

	  bitmap_and_into (return_regs,
			   df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  bitmap_and_into (return_regs,
			   df_get_live_in (EXIT_BLOCK_PTR_FOR_FN (cfun)));
	  if (!bitmap_empty_p (return_regs))
	    {
	      FOR_BB_INSNS_REVERSE (new_dest, insn)
		if (NONDEBUG_INSN_P (insn))
		  {
		    df_ref def;

		    /* If this insn sets any reg in return_regs, add all
		       reg uses to the set of regs we're interested in.  */
		    FOR_EACH_INSN_DEF (def, insn)
		      if (bitmap_bit_p (return_regs, DF_REF_REGNO (def)))
			{
			  df_simulate_uses (insn, return_regs);
			  break;
			}
		  }
	      if (bitmap_intersect_p (merge_set, return_regs))
		{
		  BITMAP_FREE (return_regs);
		  BITMAP_FREE (merge_set);
		  return FALSE;
		}
	    }
	  BITMAP_FREE (return_regs);
	}
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      if (!any_condjump_p (jump))
	goto cancel;

      if (JUMP_P (BB_END (dest_edge->src)))
	new_dest_label = JUMP_LABEL (BB_END (dest_edge->src));
      else if (new_dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	new_dest_label = ret_rtx;
      else
	new_dest_label = block_label (new_dest);

      rtx_jump_insn *jump_insn = as_a <rtx_jump_insn *> (jump);
      if (reversep
	  ? ! invert_jump_1 (jump_insn, new_dest_label)
	  : ! redirect_jump_1 (jump_insn, new_dest_label))
	goto cancel;
    }

  if (verify_changes (n_validated_changes))
    confirm_change_group ();
  else
    goto cancel;

  if (other_bb != new_dest)
    {
      redirect_jump_2 (as_a <rtx_jump_insn *> (jump), old_dest, new_dest_label,
		       0, reversep);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
	{
	  std::swap (BRANCH_EDGE (test_bb)->count,
		     FALLTHRU_EDGE (test_bb)->count);
	  std::swap (BRANCH_EDGE (test_bb)->probability,
		     FALLTHRU_EDGE (test_bb)->probability);
	  update_br_prob_note (test_bb);
	}
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      rtx_insn *insn;

      if (end == BB_END (merge_bb))
	BB_END (merge_bb) = PREV_INSN (head);

      /* PR 21767: when moving insns above a conditional branch, the REG_EQUAL
	 notes being moved might become invalid.  */
      insn = head;
      do
	{
	  rtx note;

	  if (! INSN_P (insn))
	    continue;
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (! note)
	    continue;
	  remove_note (insn, note);
	} while (insn != end && (insn = NEXT_INSN (insn)));

      /* PR46315: when moving insns above a conditional branch, the REG_EQUAL
	 notes referring to the registers being set might become invalid.  */
      if (merge_set)
	{
	  unsigned i;
	  bitmap_iterator bi;

	  EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
	    remove_reg_equal_equiv_notes_for_regno (i);

	  BITMAP_FREE (merge_set);
	}

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
	 At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  cancel_changes (0);

  if (merge_set)
    BITMAP_FREE (merge_set);

  return FALSE;
}
/* Main entry point for all if-conversion.  AFTER_COMBINE is true if
   we are after the combine pass.  */

static void
if_convert (bool after_combine)
{
  basic_block bb;
  int pass;

  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  /* Record whether we are after combine pass.  */
  ifcvt_after_combine = after_combine;
  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;

  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  mark_loop_exit_edges ();
  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);

  /* Compute postdominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
      df_analyze ();
      /* Only need to do dce on the first pass.  */
      df_clear_flags (DF_LR_RUN_DCE);
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && pass > 1)
	fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB_FN (bb, cfun)
	{
	  basic_block new_bb;
	  while (!df_get_bb_dirty (bb)
		 && (new_bb = find_if_header (bb, pass)) != NULL)
	    bb = new_bb;
	}

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && cond_exec_changed_p)
	print_rtl_with_bb (dump_file, get_insns (), dump_flags);
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  if (dump_file)
    fflush (dump_file);

  clear_aux_for_blocks ();

  /* If we allocated new pseudos, we must resize the array for sched1.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Write the final stats.  */
  if (dump_file && num_possible_if_blocks > 0)
    {
      fprintf (dump_file,
	       "\n%d possible IF blocks searched.\n",
	       num_possible_if_blocks);
      fprintf (dump_file,
	       "%d IF blocks converted.\n",
	       num_updated_if_blocks);
      fprintf (dump_file,
	       "%d true changes made.\n\n\n",
	       num_true_changes);
    }

  if (optimize == 1)
    df_remove_problem (df_live);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
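/* A note for readers: the dump output produced above goes to the files
   requested with -fdump-rtl-ce1, -fdump-rtl-ce2 and -fdump-rtl-ce3,
   which match the names of the three passes defined below.  */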
/* If-conversion and CFG cleanup.  */
static unsigned int
rest_of_handle_if_conversion (void)
{
  if (flag_if_conversion)
    {
      if (dump_file)
	{
	  dump_reg_info (dump_file);
	  dump_flow_info (dump_file, dump_flags);
	}
      cleanup_cfg (CLEANUP_EXPENSIVE);
      if_convert (false);
    }

  cleanup_cfg (0);
  return 0;
}
namespace {

const pass_data pass_data_rtl_ifcvt =
{
  RTL_PASS, /* type */
  "ce1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_ifcvt : public rtl_opt_pass
{
public:
  pass_rtl_ifcvt (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_ifcvt, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (optimize > 0) && dbg_cnt (if_conversion);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_if_conversion ();
    }

}; // class pass_rtl_ifcvt

} // anon namespace

rtl_opt_pass *
make_pass_rtl_ifcvt (gcc::context *ctxt)
{
  return new pass_rtl_ifcvt (ctxt);
}
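/* Illustrative note: the factory above is what the pass manager calls
   to instantiate the pass; its place in the pipeline is determined by
   an entry of the form "NEXT_PASS (pass_rtl_ifcvt);" in passes.def.  */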
/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  */

namespace {

const pass_data pass_data_if_after_combine =
{
  RTL_PASS, /* type */
  "ce2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_if_after_combine : public rtl_opt_pass
{
public:
  pass_if_after_combine (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_combine, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion
	&& dbg_cnt (if_after_combine);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_combine

} // anon namespace

rtl_opt_pass *
make_pass_if_after_combine (gcc::context *ctxt)
{
  return new pass_if_after_combine (ctxt);
}


namespace {

const pass_data pass_data_if_after_reload =
{
  RTL_PASS, /* type */
  "ce3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_if_after_reload : public rtl_opt_pass
{
public:
  pass_if_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion2
	&& dbg_cnt (if_after_reload);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_if_after_reload (gcc::context *ctxt)
{
  return new pass_if_after_reload (ctxt);
}