gcc/ifcvt.c
1 /* If-conversion support.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
24 #include "rtl.h"
25 #include "regs.h"
26 #include "function.h"
27 #include "flags.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "expr.h"
33 #include "real.h"
34 #include "output.h"
35 #include "toplev.h"
36 #include "tm_p.h"
39 #ifndef HAVE_conditional_execution
40 #define HAVE_conditional_execution 0
41 #endif
42 #ifndef HAVE_conditional_move
43 #define HAVE_conditional_move 0
44 #endif
45 #ifndef HAVE_incscc
46 #define HAVE_incscc 0
47 #endif
48 #ifndef HAVE_decscc
49 #define HAVE_decscc 0
50 #endif
52 #ifndef MAX_CONDITIONAL_EXECUTE
53 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
54 #endif
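/* For example, with a BRANCH_COST of 2 the default permits up to three
   insns in the THEN block of an IF-THEN; cond_exec_process_if_block
   doubles that limit for the combined THEN and ELSE blocks of an
   IF-THEN-ELSE.  */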
56 #define NULL_EDGE ((struct edge_def *)NULL)
57 #define NULL_BLOCK ((struct basic_block_def *)NULL)
59 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
60 static int num_possible_if_blocks;
62 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
63 execution. */
64 static int num_updated_if_blocks;
66 /* # of basic blocks that were removed. */
67 static int num_removed_blocks;
69 /* The post-dominator relation on the original block numbers. */
70 static sbitmap *post_dominators;
72 /* Forward references. */
73 static int count_bb_insns PARAMS ((basic_block));
74 static rtx first_active_insn PARAMS ((basic_block));
75 static int last_active_insn_p PARAMS ((basic_block, rtx));
76 static int seq_contains_jump PARAMS ((rtx));
78 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
79 static rtx cond_exec_get_condition PARAMS ((rtx));
80 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
81 basic_block, basic_block));
83 static rtx noce_get_condition PARAMS ((rtx, rtx *));
84 static int noce_operand_ok PARAMS ((rtx));
85 static int noce_process_if_block PARAMS ((basic_block, basic_block,
86 basic_block, basic_block));
88 static int process_if_block PARAMS ((basic_block, basic_block,
89 basic_block, basic_block));
90 static void merge_if_block PARAMS ((basic_block, basic_block,
91 basic_block, basic_block));
93 static int find_if_header PARAMS ((basic_block));
94 static int find_if_block PARAMS ((basic_block, edge, edge));
95 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
96 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
97 static int find_memory PARAMS ((rtx *, void *));
98 static int dead_or_predicable PARAMS ((basic_block, basic_block,
99 basic_block, rtx, int));
100 static void noce_emit_move_insn PARAMS ((rtx, rtx));
102 /* Abuse the basic_block AUX field to store the original block index,
103    as well as a flag indicating that the block should be rescanned for
104 life analysis. */
106 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
107 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
108 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
109 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
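/* For example, a block with original index 5 stores (5 << 1) == 10 in AUX;
   SET_UPDATE_LIFE then sets the low bit, giving 11, after which ORIG_INDEX
   still yields 5 and UPDATE_LIFE yields 1.  */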
112 /* Count the number of non-jump active insns in BB. */
114 static int
115 count_bb_insns (bb)
116 basic_block bb;
118 int count = 0;
119 rtx insn = bb->head;
121 while (1)
123 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
124 count++;
126 if (insn == bb->end)
127 break;
128 insn = NEXT_INSN (insn);
131 return count;
134 /* Return the first non-jump active insn in the basic block. */
136 static rtx
137 first_active_insn (bb)
138 basic_block bb;
140 rtx insn = bb->head;
142 if (GET_CODE (insn) == CODE_LABEL)
144 if (insn == bb->end)
145 return NULL_RTX;
146 insn = NEXT_INSN (insn);
149 while (GET_CODE (insn) == NOTE)
151 if (insn == bb->end)
152 return NULL_RTX;
153 insn = NEXT_INSN (insn);
156 if (GET_CODE (insn) == JUMP_INSN)
157 return NULL_RTX;
159 return insn;
162 /* Return true if INSN is the last active non-jump insn in BB. */
164 static int
165 last_active_insn_p (bb, insn)
166 basic_block bb;
167 rtx insn;
171 if (insn == bb->end)
172 return TRUE;
173 insn = NEXT_INSN (insn);
175 while (GET_CODE (insn) == NOTE);
177 return GET_CODE (insn) == JUMP_INSN;
180 /* It is possible, especially when the expanders have dealt with
181    multi-word arithmetic, for them to have emitted jumps.  Search
182 through the sequence and return TRUE if a jump exists so that
183 we can abort the conversion. */
185 static int
186 seq_contains_jump (insn)
187 rtx insn;
189 while (insn)
191 if (GET_CODE (insn) == JUMP_INSN)
192 return 1;
193 insn = NEXT_INSN (insn);
195 return 0;
198 /* Go through a bunch of insns, converting them to conditional
199 execution format if possible. Return TRUE if all of the non-note
200 insns were processed. */
202 static int
203 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
204 rtx start; /* first insn to look at */
205 rtx end; /* last insn to look at */
206 rtx test; /* conditional execution test */
207 rtx prob_val; /* probability of branch taken. */
208      int mod_ok;		/* true if the test may be modified by the last insn.  */
210 int must_be_last = FALSE;
211 rtx insn;
212 rtx pattern;
214 for (insn = start; ; insn = NEXT_INSN (insn))
216 if (GET_CODE (insn) == NOTE)
217 goto insn_done;
219 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
220 abort ();
222 /* Remove USE insns that get in the way. */
223 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
225 /* ??? Ug. Actually unlinking the thing is problematic,
226 given what we'd have to coordinate with our callers. */
227 PUT_CODE (insn, NOTE);
228 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
229 NOTE_SOURCE_FILE (insn) = 0;
230 goto insn_done;
233 /* Last insn wasn't last? */
234 if (must_be_last)
235 return FALSE;
237 if (modified_in_p (test, insn))
239 if (!mod_ok)
240 return FALSE;
241 must_be_last = TRUE;
244 /* Now build the conditional form of the instruction. */
245 pattern = PATTERN (insn);
247 /* If the machine needs to modify the insn being conditionally executed,
248 say for example to force a constant integer operand into a temp
249 register, do so here. */
250 #ifdef IFCVT_MODIFY_INSN
251 IFCVT_MODIFY_INSN (pattern, insn);
252 if (! pattern)
253 return FALSE;
254 #endif
256 validate_change (insn, &PATTERN (insn),
257 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
258 pattern), 1);
260 if (GET_CODE (insn) == CALL_INSN && prob_val)
261 validate_change (insn, &REG_NOTES (insn),
262 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
263 REG_NOTES (insn)), 1);
265 insn_done:
266 if (insn == end)
267 break;
270 return TRUE;
273 /* Return the condition for a jump. Do not do any special processing. */
275 static rtx
276 cond_exec_get_condition (jump)
277 rtx jump;
279 rtx test_if, cond;
281 if (any_condjump_p (jump))
282 test_if = SET_SRC (pc_set (jump));
283 else
284 return NULL_RTX;
285 cond = XEXP (test_if, 0);
287 /* If this branches to JUMP_LABEL when the condition is false,
288 reverse the condition. */
289 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
290 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
291 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
292 GET_MODE (cond), XEXP (cond, 0),
293 XEXP (cond, 1));
295 return cond;
298 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
299 to conditional execution. Return TRUE if we were successful at
300    converting the block.  */
302 static int
303 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
304 basic_block test_bb; /* Basic block test is in */
305 basic_block then_bb; /* Basic block for THEN block */
306 basic_block else_bb; /* Basic block for ELSE block */
307 basic_block join_bb; /* Basic block the join label is in */
309 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
310 rtx then_start; /* first insn in THEN block */
311 rtx then_end; /* last insn + 1 in THEN block */
312 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
313 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
314 int max; /* max # of insns to convert. */
315 int then_mod_ok; /* whether conditional mods are ok in THEN */
316 rtx true_expr; /* test for else block insns */
317 rtx false_expr; /* test for then block insns */
318 rtx true_prob_val; /* probability of else block */
319 rtx false_prob_val; /* probability of then block */
320 int n_insns;
322 /* Find the conditional jump to the ELSE or JOIN part, and isolate
323 the test. */
324 test_expr = cond_exec_get_condition (test_bb->end);
325 if (! test_expr)
326 return FALSE;
328 /* If the conditional jump is more than just a conditional jump,
329 then we can not do conditional execution conversion on this block. */
330 if (!onlyjump_p (test_bb->end))
331 return FALSE;
333 /* Collect the bounds of where we're to search. */
335 then_start = then_bb->head;
336 then_end = then_bb->end;
338 /* Skip a label heading THEN block. */
339 if (GET_CODE (then_start) == CODE_LABEL)
340 then_start = NEXT_INSN (then_start);
342 /* Skip a (use (const_int 0)) or branch as the final insn. */
343 if (GET_CODE (then_end) == INSN
344 && GET_CODE (PATTERN (then_end)) == USE
345 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
346 then_end = PREV_INSN (then_end);
347 else if (GET_CODE (then_end) == JUMP_INSN)
348 then_end = PREV_INSN (then_end);
350 if (else_bb)
352 /* Skip the ELSE block's label. */
353 else_start = NEXT_INSN (else_bb->head);
354 else_end = else_bb->end;
356 /* Skip a (use (const_int 0)) or branch as the final insn. */
357 if (GET_CODE (else_end) == INSN
358 && GET_CODE (PATTERN (else_end)) == USE
359 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
360 else_end = PREV_INSN (else_end);
361 else if (GET_CODE (else_end) == JUMP_INSN)
362 else_end = PREV_INSN (else_end);
365 /* How many instructions should we convert in total? */
366 n_insns = 0;
367 if (else_bb)
369 max = 2 * MAX_CONDITIONAL_EXECUTE;
370 n_insns = count_bb_insns (else_bb);
372 else
373 max = MAX_CONDITIONAL_EXECUTE;
374 n_insns += count_bb_insns (then_bb);
375 if (n_insns > max)
376 return FALSE;
378 /* Map test_expr/test_jump into the appropriate MD tests to use on
379 the conditionally executed code. */
381 true_expr = test_expr;
382 false_expr = gen_rtx_fmt_ee (reverse_condition (GET_CODE (true_expr)),
383 GET_MODE (true_expr), XEXP (true_expr, 0),
384 XEXP (true_expr, 1));
386 #ifdef IFCVT_MODIFY_TESTS
387 /* If the machine description needs to modify the tests, such as setting a
388 conditional execution register from a comparison, it can do so here. */
389 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
390 join_bb);
392 /* See if the conversion failed */
393 if (!true_expr || !false_expr)
394 goto fail;
395 #endif
397 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
398 if (true_prob_val)
400 true_prob_val = XEXP (true_prob_val, 0);
401 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
403 else
404 false_prob_val = NULL_RTX;
406 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
407      in the THEN block.  */
408 then_mod_ok = (else_bb == NULL_BLOCK);
410 /* Go through the THEN and ELSE blocks converting the insns if possible
411 to conditional execution. */
413 if (then_end
414 && ! cond_exec_process_insns (then_start, then_end,
415 false_expr, false_prob_val, then_mod_ok))
416 goto fail;
418 if (else_bb
419 && ! cond_exec_process_insns (else_start, else_end,
420 true_expr, true_prob_val, TRUE))
421 goto fail;
423 if (! apply_change_group ())
424 return FALSE;
426 #ifdef IFCVT_MODIFY_FINAL
427 /* Do any machine dependent final modifications */
428 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
429 #endif
431 /* Conversion succeeded. */
432 if (rtl_dump_file)
433 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
434 n_insns, (n_insns == 1) ? " was" : "s were");
436 /* Merge the blocks! */
437 merge_if_block (test_bb, then_bb, else_bb, join_bb);
438 return TRUE;
440 fail:
441 #ifdef IFCVT_MODIFY_CANCEL
442 /* Cancel any machine dependent changes. */
443 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
444 #endif
446 cancel_changes (0);
447 return FALSE;
450 /* Used by noce_process_if_block to communicate with its subroutines.
452 The subroutines know that A and B may be evaluated freely. They
453 know that X is a register. They should insert new instructions
454 before cond_earliest. */
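/* In terms of the patterns recognized by noce_process_if_block, X, A and B
   stand for "if (...) x = a; else x = b;", INSN_A and INSN_B are the insns
   containing those two sets, JUMP is the conditional branch that ends
   TEST_BB, and COND is its (possibly reversed) condition.  */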
456 struct noce_if_info
458 basic_block test_bb;
459 rtx insn_a, insn_b;
460 rtx x, a, b;
461 rtx jump, cond, cond_earliest;
464 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
465 rtx, int, int));
466 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
467 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
468 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
469 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
470 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
471 rtx, enum rtx_code, rtx,
472 rtx, rtx, rtx));
473 static int noce_try_cmove PARAMS ((struct noce_if_info *));
474 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
475 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
476 rtx, rtx *));
477 static int noce_try_minmax PARAMS ((struct noce_if_info *));
478 static int noce_try_abs PARAMS ((struct noce_if_info *));
480 /* Helper function for noce_try_store_flag*. */
482 static rtx
483 noce_emit_store_flag (if_info, x, reversep, normalize)
484 struct noce_if_info *if_info;
485 rtx x;
486 int reversep, normalize;
488 rtx cond = if_info->cond;
489 int cond_complex;
490 enum rtx_code code;
492 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
493 || ! general_operand (XEXP (cond, 1), VOIDmode));
495 /* If earliest == jump, or when the condition is complex, try to
496 build the store_flag insn directly. */
498 if (cond_complex)
499 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
501 if (reversep)
502 code = reversed_comparison_code (cond, if_info->jump);
503 else
504 code = GET_CODE (cond);
506 if ((if_info->cond_earliest == if_info->jump || cond_complex)
507 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
509 rtx tmp;
511 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
512 XEXP (cond, 1));
513 tmp = gen_rtx_SET (VOIDmode, x, tmp);
515 start_sequence ();
516 tmp = emit_insn (tmp);
518 if (recog_memoized (tmp) >= 0)
520 tmp = get_insns ();
521 end_sequence ();
522 emit_insns (tmp);
524 if_info->cond_earliest = if_info->jump;
526 return x;
529 end_sequence ();
532 /* Don't even try if the comparison operands are weird. */
533 if (cond_complex)
534 return NULL_RTX;
536 return emit_store_flag (x, code, XEXP (cond, 0),
537 XEXP (cond, 1), VOIDmode,
538 (code == LTU || code == LEU
539 || code == GEU || code == GTU), normalize);
542 /* Emit an insn to copy Y into X, handling STRICT_LOW_PART destinations.  */
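/* For a destination such as (strict_low_part (subreg:HI (reg:SI n) 0)),
   the HImode value Y is written into the corresponding HImode piece of
   the SImode register with store_bit_field, leaving the other bits of
   the register unchanged.  */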
543 static void
544 noce_emit_move_insn (x, y)
545 rtx x, y;
547 enum machine_mode outmode, inmode;
548 rtx outer, inner;
549 int bitpos;
551 if (GET_CODE (x) != STRICT_LOW_PART)
553 emit_move_insn (x, y);
554 return;
557 outer = XEXP (x, 0);
558 inner = XEXP (outer, 0);
559 outmode = GET_MODE (outer);
560 inmode = GET_MODE (inner);
561 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
562 store_bit_field (inner, GET_MODE_BITSIZE (outmode),
563 bitpos, outmode, y, GET_MODE_BITSIZE (inmode),
564 GET_MODE_BITSIZE (inmode));
567 /* Convert "if (test) x = 1; else x = 0".
569 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
570 tried in noce_try_store_flag_constants after noce_try_cmove has had
571 a go at the conversion. */
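/* For example, when STORE_FLAG_VALUE is 1,
     if (a < b) x = 1; else x = 0;
   => x = (a < b);
   via a single store-flag insn.  */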
573 static int
574 noce_try_store_flag (if_info)
575 struct noce_if_info *if_info;
577 int reversep;
578 rtx target, seq;
580 if (GET_CODE (if_info->b) == CONST_INT
581 && INTVAL (if_info->b) == STORE_FLAG_VALUE
582 && if_info->a == const0_rtx)
583 reversep = 0;
584 else if (if_info->b == const0_rtx
585 && GET_CODE (if_info->a) == CONST_INT
586 && INTVAL (if_info->a) == STORE_FLAG_VALUE
587 && (reversed_comparison_code (if_info->cond, if_info->jump)
588 != UNKNOWN))
589 reversep = 1;
590 else
591 return FALSE;
593 start_sequence ();
595 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
596 if (target)
598 if (target != if_info->x)
599 noce_emit_move_insn (if_info->x, target);
601 seq = get_insns ();
602 end_sequence ();
603 emit_insns_before (seq, if_info->cond_earliest);
605 return TRUE;
607 else
609 end_sequence ();
610 return FALSE;
614 /* Convert "if (test) x = a; else x = b", for A and B constant. */
616 static int
617 noce_try_store_flag_constants (if_info)
618 struct noce_if_info *if_info;
620 rtx target, seq;
621 int reversep;
622 HOST_WIDE_INT itrue, ifalse, diff, tmp;
623 int normalize, can_reverse;
625 if (! no_new_pseudos
626 && GET_CODE (if_info->a) == CONST_INT
627 && GET_CODE (if_info->b) == CONST_INT)
629 ifalse = INTVAL (if_info->a);
630 itrue = INTVAL (if_info->b);
631 diff = itrue - ifalse;
633 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
634 != UNKNOWN);
636 reversep = 0;
637 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
638 normalize = 0;
639 else if (ifalse == 0 && exact_log2 (itrue) >= 0
640 && (STORE_FLAG_VALUE == 1
641 || BRANCH_COST >= 2))
642 normalize = 1;
643 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
644 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
645 normalize = 1, reversep = 1;
646 else if (itrue == -1
647 && (STORE_FLAG_VALUE == -1
648 || BRANCH_COST >= 2))
649 normalize = -1;
650 else if (ifalse == -1 && can_reverse
651 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
652 normalize = -1, reversep = 1;
653 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
654 || BRANCH_COST >= 3)
655 normalize = -1;
656 else
657 return FALSE;
659 if (reversep)
661 tmp = itrue; itrue = ifalse; ifalse = tmp;
662 diff = -diff;
665 start_sequence ();
666 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
667 if (! target)
669 end_sequence ();
670 return FALSE;
673 /* if (test) x = 3; else x = 4;
674 => x = 3 + (test == 0); */
675 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
677 target = expand_binop (GET_MODE (if_info->x),
678 (diff == STORE_FLAG_VALUE
679 ? add_optab : sub_optab),
680 GEN_INT (ifalse), target, if_info->x, 0,
681 OPTAB_WIDEN);
684 /* if (test) x = 8; else x = 0;
685 => x = (test != 0) << 3; */
686 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
688 target = expand_binop (GET_MODE (if_info->x), ashl_optab,
689 target, GEN_INT (tmp), if_info->x, 0,
690 OPTAB_WIDEN);
693 /* if (test) x = -1; else x = b;
694 => x = -(test != 0) | b; */
695 else if (itrue == -1)
697 target = expand_binop (GET_MODE (if_info->x), ior_optab,
698 target, GEN_INT (ifalse), if_info->x, 0,
699 OPTAB_WIDEN);
702 /* if (test) x = a; else x = b;
703 => x = (-(test != 0) & (b - a)) + a; */
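      /* For example, with a == 5 and b == 9 the mask constant b - a is 4,
	 so the result is 5 + 4 == 9 when the store-flag value is all ones
	 and 5 + 0 == 5 when it is zero.  */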
704 else
706 target = expand_binop (GET_MODE (if_info->x), and_optab,
707 target, GEN_INT (diff), if_info->x, 0,
708 OPTAB_WIDEN);
709 if (target)
710 target = expand_binop (GET_MODE (if_info->x), add_optab,
711 target, GEN_INT (ifalse), if_info->x, 0,
712 OPTAB_WIDEN);
715 if (! target)
717 end_sequence ();
718 return FALSE;
721 if (target != if_info->x)
722 noce_emit_move_insn (if_info->x, target);
724 seq = get_insns ();
725 end_sequence ();
727 if (seq_contains_jump (seq))
728 return FALSE;
730 emit_insns_before (seq, if_info->cond_earliest);
732 return TRUE;
735 return FALSE;
738 /* Convert "if (test) foo++" into "foo += (test != 0)", and
739 similarly for "foo--". */
741 static int
742 noce_try_store_flag_inc (if_info)
743 struct noce_if_info *if_info;
745 rtx target, seq;
746 int subtract, normalize;
748 if (! no_new_pseudos
749 && (BRANCH_COST >= 2
750 || HAVE_incscc
751 || HAVE_decscc)
752 /* Should be no `else' case to worry about. */
753 && if_info->b == if_info->x
754 && GET_CODE (if_info->a) == PLUS
755 && (XEXP (if_info->a, 1) == const1_rtx
756 || XEXP (if_info->a, 1) == constm1_rtx)
757 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
758 && (reversed_comparison_code (if_info->cond, if_info->jump)
759 != UNKNOWN))
761 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
762 subtract = 0, normalize = 0;
763 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
764 subtract = 1, normalize = 0;
765 else
766 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
768 start_sequence ();
770 target = noce_emit_store_flag (if_info,
771 gen_reg_rtx (GET_MODE (if_info->x)),
772 1, normalize);
774 if (target)
775 target = expand_binop (GET_MODE (if_info->x),
776 subtract ? sub_optab : add_optab,
777 if_info->x, target, if_info->x, 0, OPTAB_WIDEN);
778 if (target)
780 if (target != if_info->x)
781 noce_emit_move_insn (if_info->x, target);
783 seq = get_insns ();
784 end_sequence ();
786 if (seq_contains_jump (seq))
787 return FALSE;
789 emit_insns_before (seq, if_info->cond_earliest);
791 return TRUE;
794 end_sequence ();
797 return FALSE;
800 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
802 static int
803 noce_try_store_flag_mask (if_info)
804 struct noce_if_info *if_info;
806 rtx target, seq;
807 int reversep;
809 reversep = 0;
810 if (! no_new_pseudos
811 && (BRANCH_COST >= 2
812 || STORE_FLAG_VALUE == -1)
813 && ((if_info->a == const0_rtx
814 && rtx_equal_p (if_info->b, if_info->x))
815 || ((reversep = (reversed_comparison_code (if_info->cond,
816 if_info->jump)
817 != UNKNOWN))
818 && if_info->b == const0_rtx
819 && rtx_equal_p (if_info->a, if_info->x))))
821 start_sequence ();
822 target = noce_emit_store_flag (if_info,
823 gen_reg_rtx (GET_MODE (if_info->x)),
824 reversep, -1);
825 if (target)
826 target = expand_binop (GET_MODE (if_info->x), and_optab,
827 if_info->x, target, if_info->x, 0,
828 OPTAB_WIDEN);
830 if (target)
832 if (target != if_info->x)
833 noce_emit_move_insn (if_info->x, target);
835 seq = get_insns ();
836 end_sequence ();
838 if (seq_contains_jump (seq))
839 return FALSE;
841 emit_insns_before (seq, if_info->cond_earliest);
843 return TRUE;
846 end_sequence ();
849 return FALSE;
852 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
854 static rtx
855 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
856 struct noce_if_info *if_info;
857 rtx x, cmp_a, cmp_b, vfalse, vtrue;
858 enum rtx_code code;
860 /* If earliest == jump, try to build the cmove insn directly.
861 This is helpful when combine has created some complex condition
862 (like for alpha's cmovlbs) that we can't hope to regenerate
863 through the normal interface. */
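  /* The insn built below has the form
       (set x (if_then_else (code cmp_a cmp_b) vtrue vfalse))
     and is used only if the backend recognizes it as-is.  */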
865 if (if_info->cond_earliest == if_info->jump)
867 rtx tmp;
869 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
870 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
871 tmp = gen_rtx_SET (VOIDmode, x, tmp);
873 start_sequence ();
874 tmp = emit_insn (tmp);
876 if (recog_memoized (tmp) >= 0)
878 tmp = get_insns ();
879 end_sequence ();
880 emit_insns (tmp);
882 return x;
885 end_sequence ();
888 /* Don't even try if the comparison operands are weird. */
889 if (! general_operand (cmp_a, GET_MODE (cmp_a))
890 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
891 return NULL_RTX;
893 #if HAVE_conditional_move
894 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
895 vtrue, vfalse, GET_MODE (x),
896 (code == LTU || code == GEU
897 || code == LEU || code == GTU));
898 #else
899 /* We'll never get here, as noce_process_if_block doesn't call the
900 functions involved. Ifdef code, however, should be discouraged
901    because it leads to typos in the code not selected.  In any case,
902 emit_conditional_move won't exist either. */
903 return NULL_RTX;
904 #endif
907 /* Try only simple constants and registers here. More complex cases
908    are handled in noce_try_cmove_arith after the noce_try_store_flag
909    variants have had a go at it.  */
911 static int
912 noce_try_cmove (if_info)
913 struct noce_if_info *if_info;
915 enum rtx_code code;
916 rtx target, seq;
918 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
919 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
921 start_sequence ();
923 code = GET_CODE (if_info->cond);
924 target = noce_emit_cmove (if_info, if_info->x, code,
925 XEXP (if_info->cond, 0),
926 XEXP (if_info->cond, 1),
927 if_info->a, if_info->b);
929 if (target)
931 if (target != if_info->x)
932 noce_emit_move_insn (if_info->x, target);
934 seq = get_insns ();
935 end_sequence ();
936 emit_insns_before (seq, if_info->cond_earliest);
937 return TRUE;
939 else
941 end_sequence ();
942 return FALSE;
946 return FALSE;
949 /* Try more complex cases involving conditional_move. */
951 static int
952 noce_try_cmove_arith (if_info)
953 struct noce_if_info *if_info;
955 rtx a = if_info->a;
956 rtx b = if_info->b;
957 rtx x = if_info->x;
958 rtx insn_a, insn_b;
959 rtx tmp, target;
960 int is_mem = 0;
961 enum rtx_code code;
963 /* A conditional move from two memory sources is equivalent to a
964 conditional on their addresses followed by a load. Don't do this
965 early because it'll screw alias analysis. Note that we've
966 already checked for no side effects. */
967 if (! no_new_pseudos && cse_not_expected
968 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
969 && BRANCH_COST >= 5)
971 a = XEXP (a, 0);
972 b = XEXP (b, 0);
973 x = gen_reg_rtx (Pmode);
974 is_mem = 1;
977 /* ??? We could handle this if we knew that a load from A or B could
978 not fault. This is also true if we've already loaded
979 from the address along the path from ENTRY. */
980 else if (may_trap_p (a) || may_trap_p (b))
981 return FALSE;
983 /* if (test) x = a + b; else x = c - d;
984 => y = a + b;
985 x = c - d;
986 if (test)
987 x = y;
990 code = GET_CODE (if_info->cond);
991 insn_a = if_info->insn_a;
992 insn_b = if_info->insn_b;
994 /* Possibly rearrange operands to make things come out more natural. */
995 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
997 int reversep = 0;
998 if (rtx_equal_p (b, x))
999 reversep = 1;
1000 else if (general_operand (b, GET_MODE (b)))
1001 reversep = 1;
1003 if (reversep)
1005 code = reversed_comparison_code (if_info->cond, if_info->jump);
1006 tmp = a, a = b, b = tmp;
1007 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1011 start_sequence ();
1013 /* If either operand is complex, load it into a register first.
1014 The best way to do this is to copy the original insn. In this
1015 way we preserve any clobbers etc that the insn may have had.
1016 This is of course not possible in the IS_MEM case. */
1017 if (! general_operand (a, GET_MODE (a)))
1019 rtx set;
1021 if (no_new_pseudos)
1022 goto end_seq_and_fail;
1024 if (is_mem)
1026 tmp = gen_reg_rtx (GET_MODE (a));
1027 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1029 else if (! insn_a)
1030 goto end_seq_and_fail;
1031 else
1033 a = gen_reg_rtx (GET_MODE (a));
1034 tmp = copy_rtx (insn_a);
1035 set = single_set (tmp);
1036 SET_DEST (set) = a;
1037 tmp = emit_insn (PATTERN (tmp));
1039 if (recog_memoized (tmp) < 0)
1040 goto end_seq_and_fail;
1042 if (! general_operand (b, GET_MODE (b)))
1044 rtx set;
1046 if (no_new_pseudos)
1047 goto end_seq_and_fail;
1049 if (is_mem)
1051 tmp = gen_reg_rtx (GET_MODE (b));
1052 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1054 else if (! insn_b)
1055 goto end_seq_and_fail;
1056 else
1058 b = gen_reg_rtx (GET_MODE (b));
1059 tmp = copy_rtx (insn_b);
1060 set = single_set (tmp);
1061 SET_DEST (set) = b;
1062 tmp = emit_insn (PATTERN (tmp));
1064 if (recog_memoized (tmp) < 0)
1065 goto end_seq_and_fail;
1068 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1069 XEXP (if_info->cond, 1), a, b);
1071 if (! target)
1072 goto end_seq_and_fail;
1074      /* If we're handling the memory case from above, emit the load now.  */
1075 if (is_mem)
1077 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1079 /* Copy over flags as appropriate. */
1080 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1081 MEM_VOLATILE_P (tmp) = 1;
1082 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1083 MEM_IN_STRUCT_P (tmp) = 1;
1084 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1085 MEM_SCALAR_P (tmp) = 1;
1086 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1087 MEM_ALIAS_SET (tmp) = MEM_ALIAS_SET (if_info->a);
1089 noce_emit_move_insn (if_info->x, tmp);
1091 else if (target != x)
1092 noce_emit_move_insn (x, target);
1094 tmp = get_insns ();
1095 end_sequence ();
1096 emit_insns_before (tmp, if_info->cond_earliest);
1097 return TRUE;
1099 end_seq_and_fail:
1100 end_sequence ();
1101 return FALSE;
1104 /* For most cases, the simplified condition we found is the best
1105 choice, but this is not the case for the min/max/abs transforms.
1106 For these we wish to know that it is A or B in the condition. */
1108 static rtx
1109 noce_get_alt_condition (if_info, target, earliest)
1110 struct noce_if_info *if_info;
1111 rtx target;
1112 rtx *earliest;
1114 rtx cond, set, insn;
1115 int reverse;
1117 /* If target is already mentioned in the known condition, return it. */
1118 if (reg_mentioned_p (target, if_info->cond))
1120 *earliest = if_info->cond_earliest;
1121 return if_info->cond;
1124 set = pc_set (if_info->jump);
1125 cond = XEXP (SET_SRC (set), 0);
1126 reverse
1127 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1128 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1130 cond = canonicalize_condition (if_info->jump, cond, reverse,
1131 earliest, target);
1132 if (! cond || ! reg_mentioned_p (target, cond))
1133 return NULL;
1135 /* We almost certainly searched back to a different place.
1136 Need to re-verify correct lifetimes. */
1138 /* X may not be mentioned in the range (cond_earliest, jump]. */
1139 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1140 if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
1141 return NULL;
1143 /* A and B may not be modified in the range [cond_earliest, jump). */
1144 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1145 if (INSN_P (insn)
1146 && (modified_in_p (if_info->a, insn)
1147 || modified_in_p (if_info->b, insn)))
1148 return NULL;
1150 return cond;
1153 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1155 static int
1156 noce_try_minmax (if_info)
1157 struct noce_if_info *if_info;
1159 rtx cond, earliest, target, seq;
1160 enum rtx_code code;
1161 int unsignedp;
1162 optab op;
1164 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1165 if (no_new_pseudos)
1166 return FALSE;
1168 /* ??? Reject FP modes since we don't know how 0 vs -0 or NaNs
1169 will be resolved with an SMIN/SMAX. It wouldn't be too hard
1170 to get the target to tell us... */
1171 if (FLOAT_MODE_P (GET_MODE (if_info->x))
1172 && TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1173 && ! flag_unsafe_math_optimizations)
1174 return FALSE;
1176 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1177 if (!cond)
1178 return FALSE;
1180 /* Verify the condition is of the form we expect, and canonicalize
1181 the comparison code. */
1182 code = GET_CODE (cond);
1183 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1185 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1186 return FALSE;
1188 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1190 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1191 return FALSE;
1192 code = swap_condition (code);
1194 else
1195 return FALSE;
1197 /* Determine what sort of operation this is. Note that the code is for
1198 a taken branch, so the code->operation mapping appears backwards. */
1199 switch (code)
1201 case LT:
1202 case LE:
1203 case UNLT:
1204 case UNLE:
1205 op = smax_optab;
1206 unsignedp = 0;
1207 break;
1208 case GT:
1209 case GE:
1210 case UNGT:
1211 case UNGE:
1212 op = smin_optab;
1213 unsignedp = 0;
1214 break;
1215 case LTU:
1216 case LEU:
1217 op = umax_optab;
1218 unsignedp = 1;
1219 break;
1220 case GTU:
1221 case GEU:
1222 op = umin_optab;
1223 unsignedp = 1;
1224 break;
1225 default:
1226 return FALSE;
1229 start_sequence ();
1231 target = expand_binop (GET_MODE (if_info->x), op, if_info->a, if_info->b,
1232 if_info->x, unsignedp, OPTAB_WIDEN);
1233 if (! target)
1235 end_sequence ();
1236 return FALSE;
1238 if (target != if_info->x)
1239 noce_emit_move_insn (if_info->x, target);
1241 seq = get_insns ();
1242 end_sequence ();
1244 if (seq_contains_jump (seq))
1245 return FALSE;
1247 emit_insns_before (seq, earliest);
1248 if_info->cond = cond;
1249 if_info->cond_earliest = earliest;
1251 return TRUE;
1254 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1256 static int
1257 noce_try_abs (if_info)
1258 struct noce_if_info *if_info;
1260 rtx cond, earliest, target, seq, a, b, c;
1261 int negate;
1263 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1264 if (no_new_pseudos)
1265 return FALSE;
1267 /* Recognize A and B as constituting an ABS or NABS. */
1268 a = if_info->a;
1269 b = if_info->b;
1270 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1271 negate = 0;
1272 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1274 c = a; a = b; b = c;
1275 negate = 1;
1277 else
1278 return FALSE;
1280 cond = noce_get_alt_condition (if_info, b, &earliest);
1281 if (!cond)
1282 return FALSE;
1284 /* Verify the condition is of the form we expect. */
1285 if (rtx_equal_p (XEXP (cond, 0), b))
1286 c = XEXP (cond, 1);
1287 else if (rtx_equal_p (XEXP (cond, 1), b))
1288 c = XEXP (cond, 0);
1289 else
1290 return FALSE;
1292 /* Verify that C is zero. Search backward through the block for
1293 a REG_EQUAL note if necessary. */
1294 if (REG_P (c))
1296 rtx insn, note = NULL;
1297 for (insn = earliest;
1298 insn != if_info->test_bb->head;
1299 insn = PREV_INSN (insn))
1300 if (INSN_P (insn)
1301 && ((note = find_reg_note (insn, REG_EQUAL, c))
1302 || (note = find_reg_note (insn, REG_EQUIV, c))))
1303 break;
1304 if (! note)
1305 return FALSE;
1306 c = XEXP (note, 0);
1308 if (GET_CODE (c) == MEM
1309 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1310 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1311 c = get_pool_constant (XEXP (c, 0));
1313 /* Work around funny ideas get_condition has wrt canonicalization.
1314 Note that these rtx constants are known to be CONST_INT, and
1315 therefore imply integer comparisons. */
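  /* For example, a source-level test of b >= 0 may come back from
     get_condition as (gt b (const_int -1)), and b <= 0 as
     (lt b (const_int 1)); both are comparisons against zero for our
     purposes.  */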
1316 if (c == constm1_rtx && GET_CODE (cond) == GT)
1318 else if (c == const1_rtx && GET_CODE (cond) == LT)
1320 else if (c != CONST0_RTX (GET_MODE (b)))
1321 return FALSE;
1323 /* Determine what sort of operation this is. */
1324 switch (GET_CODE (cond))
1326 case LT:
1327 case LE:
1328 case UNLT:
1329 case UNLE:
1330 negate = !negate;
1331 break;
1332 case GT:
1333 case GE:
1334 case UNGT:
1335 case UNGE:
1336 break;
1337 default:
1338 return FALSE;
1341 start_sequence ();
1343 target = expand_unop (GET_MODE (if_info->x), abs_optab, b, if_info->x, 0);
1345   /* ??? It's a quandary whether cmove would be better here, especially
1346 for integers. Perhaps combine will clean things up. */
1347 if (target && negate)
1348 target = expand_unop (GET_MODE (target), neg_optab, target, if_info->x, 0);
1350 if (! target)
1352 end_sequence ();
1353 return FALSE;
1356 if (target != if_info->x)
1357 noce_emit_move_insn (if_info->x, target);
1359 seq = get_insns ();
1360 end_sequence ();
1362 if (seq_contains_jump (seq))
1363 return FALSE;
1365 emit_insns_before (seq, earliest);
1366 if_info->cond = cond;
1367 if_info->cond_earliest = earliest;
1369 return TRUE;
1372 /* Look for the condition for the jump first. We'd prefer to avoid
1373 get_condition if we can -- it tries to look back for the contents
1374 of an original compare. On targets that use normal integers for
1375 comparisons, e.g. alpha, this is wasteful. */
1377 static rtx
1378 noce_get_condition (jump, earliest)
1379 rtx jump;
1380 rtx *earliest;
1382 rtx cond;
1383 rtx set;
1385 /* If the condition variable is a register and is MODE_INT, accept it.
1386 Otherwise, fall back on get_condition. */
1388 if (! any_condjump_p (jump))
1389 return NULL_RTX;
1391 set = pc_set (jump);
1393 cond = XEXP (SET_SRC (set), 0);
1394 if (GET_CODE (XEXP (cond, 0)) == REG
1395 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1397 *earliest = jump;
1399 /* If this branches to JUMP_LABEL when the condition is false,
1400 reverse the condition. */
1401 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1402 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1403 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1404 GET_MODE (cond), XEXP (cond, 0),
1405 XEXP (cond, 1));
1407 else
1408 cond = get_condition (jump, earliest);
1410 return cond;
1413 /* Return true if OP is ok for if-then-else processing. */
1415 static int
1416 noce_operand_ok (op)
1417 rtx op;
1419 /* We special-case memories, so handle any of them with
1420 no address side effects. */
1421 if (GET_CODE (op) == MEM)
1422 return ! side_effects_p (XEXP (op, 0));
1424 if (side_effects_p (op))
1425 return FALSE;
1427   /* ??? Unfortunately may_trap_p can't look at flag_trapping_math, due to
1428 being linked into the genfoo programs. This is probably a mistake.
1429 With finite operands, most fp operations don't trap. */
1430 if (!flag_trapping_math && FLOAT_MODE_P (GET_MODE (op)))
1431 switch (GET_CODE (op))
1433 case DIV:
1434 case MOD:
1435 case UDIV:
1436 case UMOD:
1437 /* ??? This is kinda lame -- almost every target will have forced
1438 the constant into a register first. But given the expense of
1439 division, this is probably for the best. */
1440 return (CONSTANT_P (XEXP (op, 1))
1441 && XEXP (op, 1) != CONST0_RTX (GET_MODE (op))
1442 && ! may_trap_p (XEXP (op, 0)));
1444 default:
1445 switch (GET_RTX_CLASS (GET_CODE (op)))
1447 case '1':
1448 return ! may_trap_p (XEXP (op, 0));
1449 case 'c':
1450 case '2':
1451 return ! may_trap_p (XEXP (op, 0)) && ! may_trap_p (XEXP (op, 1));
1453 break;
1456 return ! may_trap_p (op);
1459 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1460 without using conditional execution. Return TRUE if we were
1461    successful at converting the block.  */
1463 static int
1464 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1465 basic_block test_bb; /* Basic block test is in */
1466 basic_block then_bb; /* Basic block for THEN block */
1467 basic_block else_bb; /* Basic block for ELSE block */
1468 basic_block join_bb; /* Basic block the join label is in */
1470 /* We're looking for patterns of the form
1472 (1) if (...) x = a; else x = b;
1473 (2) x = b; if (...) x = a;
1474 (3) if (...) x = a; // as if with an initial x = x.
1476    The latter patterns require jumps to be more expensive.
1478 ??? For future expansion, look for multiple X in such patterns. */
1480 struct noce_if_info if_info;
1481 rtx insn_a, insn_b;
1482 rtx set_a, set_b;
1483 rtx orig_x, x, a, b;
1484 rtx jump, cond, insn;
1486 /* If this is not a standard conditional jump, we can't parse it. */
1487 jump = test_bb->end;
1488 cond = noce_get_condition (jump, &if_info.cond_earliest);
1489 if (! cond)
1490 return FALSE;
1492 /* If the conditional jump is more than just a conditional jump,
1493 then we can not do if-conversion on this block. */
1494 if (! onlyjump_p (jump))
1495 return FALSE;
1497 /* We must be comparing objects whose modes imply the size. */
1498 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1499 return FALSE;
1501 /* Look for one of the potential sets. */
1502 insn_a = first_active_insn (then_bb);
1503 if (! insn_a
1504 || ! last_active_insn_p (then_bb, insn_a)
1505 || (set_a = single_set (insn_a)) == NULL_RTX)
1506 return FALSE;
1508 x = SET_DEST (set_a);
1509 a = SET_SRC (set_a);
1511 /* Look for the other potential set. Make sure we've got equivalent
1512 destinations. */
1513 /* ??? This is overconservative. Storing to two different mems is
1514 as easy as conditionally computing the address. Storing to a
1515 single mem merely requires a scratch memory to use as one of the
1516 destination addresses; often the memory immediately below the
1517 stack pointer is available for this. */
1518 set_b = NULL_RTX;
1519 if (else_bb)
1521 insn_b = first_active_insn (else_bb);
1522 if (! insn_b
1523 || ! last_active_insn_p (else_bb, insn_b)
1524 || (set_b = single_set (insn_b)) == NULL_RTX
1525 || ! rtx_equal_p (x, SET_DEST (set_b)))
1526 return FALSE;
1528 else
1530 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1531 if (! insn_b
1532 || GET_CODE (insn_b) != INSN
1533 || (set_b = single_set (insn_b)) == NULL_RTX
1534 || ! rtx_equal_p (x, SET_DEST (set_b))
1535 || reg_mentioned_p (x, cond)
1536 || reg_mentioned_p (x, a)
1537 || reg_mentioned_p (x, SET_SRC (set_b)))
1538 insn_b = set_b = NULL_RTX;
1540 b = (set_b ? SET_SRC (set_b) : x);
1542 /* X may not be mentioned in the range (cond_earliest, jump]. */
1543 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1544 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1545 return FALSE;
1547 /* A and B may not be modified in the range [cond_earliest, jump). */
1548 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1549 if (INSN_P (insn)
1550 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1551 return FALSE;
1553 /* Only operate on register destinations, and even then avoid extending
1554 the lifetime of hard registers on small register class machines. */
1555 orig_x = x;
1556 if (GET_CODE (x) != REG
1557 || (SMALL_REGISTER_CLASSES
1558 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1560 if (no_new_pseudos)
1561 return FALSE;
1562 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1563 ? XEXP (x, 0) : x));
1566 /* Don't operate on sources that may trap or are volatile. */
1567 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1568 return FALSE;
1570 /* Set up the info block for our subroutines. */
1571 if_info.test_bb = test_bb;
1572 if_info.cond = cond;
1573 if_info.jump = jump;
1574 if_info.insn_a = insn_a;
1575 if_info.insn_b = insn_b;
1576 if_info.x = x;
1577 if_info.a = a;
1578 if_info.b = b;
1580 /* Try optimizations in some approximation of a useful order. */
1581 /* ??? Should first look to see if X is live incoming at all. If it
1582 isn't, we don't need anything but an unconditional set. */
1584 /* Look and see if A and B are really the same. Avoid creating silly
1585 cmove constructs that no one will fix up later. */
1586 if (rtx_equal_p (a, b))
1588 /* If we have an INSN_B, we don't have to create any new rtl. Just
1589 move the instruction that we already have. If we don't have an
1590 INSN_B, that means that A == X, and we've got a noop move. In
1591 that case don't do anything and let the code below delete INSN_A. */
1592 if (insn_b && else_bb)
1594 rtx note;
1596 if (else_bb && insn_b == else_bb->end)
1597 else_bb->end = PREV_INSN (insn_b);
1598 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1600 /* If there was a REG_EQUAL note, delete it since it may have been
1601 true due to this insn being after a jump. */
1602 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1603 remove_note (insn_b, note);
1605 insn_b = NULL_RTX;
1607 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1608 x must be executed twice. */
1609 else if (insn_b && side_effects_p (orig_x))
1610 return FALSE;
1612 x = orig_x;
1613 goto success;
1616 if (noce_try_store_flag (&if_info))
1617 goto success;
1618 if (noce_try_minmax (&if_info))
1619 goto success;
1620 if (noce_try_abs (&if_info))
1621 goto success;
1622 if (HAVE_conditional_move
1623 && noce_try_cmove (&if_info))
1624 goto success;
1625 if (! HAVE_conditional_execution)
1627 if (noce_try_store_flag_constants (&if_info))
1628 goto success;
1629 if (noce_try_store_flag_inc (&if_info))
1630 goto success;
1631 if (noce_try_store_flag_mask (&if_info))
1632 goto success;
1633 if (HAVE_conditional_move
1634 && noce_try_cmove_arith (&if_info))
1635 goto success;
1638 return FALSE;
1640 success:
1641 /* The original sets may now be killed. */
1642 if (insn_a == then_bb->end)
1643 then_bb->end = PREV_INSN (insn_a);
1644 flow_delete_insn (insn_a);
1646 /* Several special cases here: First, we may have reused insn_b above,
1647 in which case insn_b is now NULL. Second, we want to delete insn_b
1648      if it came from the ELSE block, because it follows the now correct
1649 write that appears in the TEST block. However, if we got insn_b from
1650 the TEST block, it may in fact be loading data needed for the comparison.
1651 We'll let life_analysis remove the insn if it's really dead. */
1652 if (insn_b && else_bb)
1654 if (insn_b == else_bb->end)
1655 else_bb->end = PREV_INSN (insn_b);
1656 flow_delete_insn (insn_b);
1659 /* The new insns will have been inserted before cond_earliest. We should
1660 be able to remove the jump with impunity, but the condition itself may
1661 have been modified by gcse to be shared across basic blocks. */
1662 test_bb->end = PREV_INSN (jump);
1663 flow_delete_insn (jump);
1665 /* If we used a temporary, fix it up now. */
1666 if (orig_x != x)
1668 start_sequence ();
1669 noce_emit_move_insn (orig_x, x);
1670 insn_b = gen_sequence ();
1671 end_sequence ();
1673 test_bb->end = emit_insn_after (insn_b, test_bb->end);
1676 /* Merge the blocks! */
1677 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1679 return TRUE;
1682 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1683 straight line code. Return true if successful. */
1685 static int
1686 process_if_block (test_bb, then_bb, else_bb, join_bb)
1687 basic_block test_bb; /* Basic block test is in */
1688 basic_block then_bb; /* Basic block for THEN block */
1689 basic_block else_bb; /* Basic block for ELSE block */
1690 basic_block join_bb; /* Basic block the join label is in */
1692 if (! reload_completed
1693 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1694 return TRUE;
1696 if (HAVE_conditional_execution
1697 && reload_completed
1698 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1699 return TRUE;
1701 return FALSE;
1704 /* Merge the blocks and mark for local life update. */
1706 static void
1707 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1708 basic_block test_bb; /* Basic block test is in */
1709 basic_block then_bb; /* Basic block for THEN block */
1710 basic_block else_bb; /* Basic block for ELSE block */
1711 basic_block join_bb; /* Basic block the join label is in */
1713 basic_block combo_bb;
1715 /* All block merging is done into the lower block numbers. */
1717 combo_bb = test_bb;
1719 /* First merge TEST block into THEN block. This is a no-brainer since
1720 the THEN block did not have a code label to begin with. */
1722 if (combo_bb->global_live_at_end)
1723 COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
1724 merge_blocks_nomove (combo_bb, then_bb);
1725 num_removed_blocks++;
1727 /* The ELSE block, if it existed, had a label. That label count
1728 will almost always be zero, but odd things can happen when labels
1729 get their addresses taken. */
1730 if (else_bb)
1732 merge_blocks_nomove (combo_bb, else_bb);
1733 num_removed_blocks++;
1736 /* If there was no join block reported, that means it was not adjacent
1737 to the others, and so we cannot merge them. */
1739 if (! join_bb)
1741 /* The outgoing edge for the current COMBO block should already
1742 be correct. Verify this. */
1743 if (combo_bb->succ == NULL_EDGE)
1744 abort ();
1746      /* There should still be a branch at the end of the THEN or ELSE
1747 blocks taking us to our final destination. */
1748 if (! simplejump_p (combo_bb->end)
1749 && ! returnjump_p (combo_bb->end))
1750 abort ();
1753 /* The JOIN block may have had quite a number of other predecessors too.
1754 Since we've already merged the TEST, THEN and ELSE blocks, we should
1755 have only one remaining edge from our if-then-else diamond. If there
1756 is more than one remaining edge, it must come from elsewhere. There
1757 may be zero incoming edges if the THEN block didn't actually join
1758 back up (as with a call to abort). */
1759 else if (join_bb->pred == NULL || join_bb->pred->pred_next == NULL)
1761 /* We can merge the JOIN. */
1762 if (combo_bb->global_live_at_end)
1763 COPY_REG_SET (combo_bb->global_live_at_end,
1764 join_bb->global_live_at_end);
1765 merge_blocks_nomove (combo_bb, join_bb);
1766 num_removed_blocks++;
1768 else
1770 /* We cannot merge the JOIN. */
1772 /* The outgoing edge for the current COMBO block should already
1773 be correct. Verify this. */
1774 if (combo_bb->succ->succ_next != NULL_EDGE
1775 || combo_bb->succ->dest != join_bb)
1776 abort ();
1778 /* Remove the jump and cruft from the end of the COMBO block. */
1779 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1782 /* Make sure we update life info properly. */
1783 SET_UPDATE_LIFE (combo_bb);
1785 num_updated_if_blocks++;
1788 /* Find a block ending in a simple IF condition. Return TRUE if
1789 we were able to transform it in some way. */
1791 static int
1792 find_if_header (test_bb)
1793 basic_block test_bb;
1795 edge then_edge;
1796 edge else_edge;
1798 /* The kind of block we're looking for has exactly two successors. */
1799 if ((then_edge = test_bb->succ) == NULL_EDGE
1800 || (else_edge = then_edge->succ_next) == NULL_EDGE
1801 || else_edge->succ_next != NULL_EDGE)
1802 return FALSE;
1804 /* Neither edge should be abnormal. */
1805 if ((then_edge->flags & EDGE_COMPLEX)
1806 || (else_edge->flags & EDGE_COMPLEX))
1807 return FALSE;
1809 /* The THEN edge is canonically the one that falls through. */
1810 if (then_edge->flags & EDGE_FALLTHRU)
1812 else if (else_edge->flags & EDGE_FALLTHRU)
1814 edge e = else_edge;
1815 else_edge = then_edge;
1816 then_edge = e;
1818 else
1819 /* Otherwise this must be a multiway branch of some sort. */
1820 return FALSE;
1822 if (find_if_block (test_bb, then_edge, else_edge))
1823 goto success;
1824 if (post_dominators
1825 && (! HAVE_conditional_execution || reload_completed))
1827 if (find_if_case_1 (test_bb, then_edge, else_edge))
1828 goto success;
1829 if (find_if_case_2 (test_bb, then_edge, else_edge))
1830 goto success;
1833 return FALSE;
1835 success:
1836 if (rtl_dump_file)
1837 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1838 return TRUE;
1841 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1842 block. If so, we'll try to convert the insns to not require the branch.
1843    Return TRUE if we were successful at converting the block.  */
1845 static int
1846 find_if_block (test_bb, then_edge, else_edge)
1847 basic_block test_bb;
1848 edge then_edge, else_edge;
1850 basic_block then_bb = then_edge->dest;
1851 basic_block else_bb = else_edge->dest;
1852 basic_block join_bb = NULL_BLOCK;
1853 edge then_succ = then_bb->succ;
1854 edge else_succ = else_bb->succ;
1855 int next_index;
1857 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1858 if (then_bb->pred->pred_next != NULL_EDGE)
1859 return FALSE;
1861 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1862 if (then_succ != NULL_EDGE
1863 && (then_succ->succ_next != NULL_EDGE
1864 || (then_succ->flags & EDGE_COMPLEX)))
1865 return FALSE;
1867 /* If the THEN block has no successors, conditional execution can still
1868 make a conditional call. Don't do this unless the ELSE block has
1869 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1870 Check for the last insn of the THEN block being an indirect jump, which
1871 is listed as not having any successors, but confuses the rest of the CE
1872 code processing. XXX we should fix this in the future. */
1873 if (then_succ == NULL)
1875 if (else_bb->pred->pred_next == NULL_EDGE)
1877 rtx last_insn = then_bb->end;
1879 while (last_insn
1880 && GET_CODE (last_insn) == NOTE
1881 && last_insn != then_bb->head)
1882 last_insn = PREV_INSN (last_insn);
1884 if (last_insn
1885 && GET_CODE (last_insn) == JUMP_INSN
1886 && ! simplejump_p (last_insn))
1887 return FALSE;
1889 join_bb = else_bb;
1890 else_bb = NULL_BLOCK;
1892 else
1893 return FALSE;
1896 /* If the THEN block's successor is the other edge out of the TEST block,
1897 then we have an IF-THEN combo without an ELSE. */
1898 else if (then_succ->dest == else_bb)
1900 join_bb = else_bb;
1901 else_bb = NULL_BLOCK;
1904 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
1905 has exactly one predecessor and one successor, and the outgoing edge
1906 is not complex, then we have an IF-THEN-ELSE combo. */
1907 else if (else_succ != NULL_EDGE
1908 && then_succ->dest == else_succ->dest
1909 && else_bb->pred->pred_next == NULL_EDGE
1910 && else_succ->succ_next == NULL_EDGE
1911 && ! (else_succ->flags & EDGE_COMPLEX))
1912 join_bb = else_succ->dest;
1914 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
1915 else
1916 return FALSE;
1918 num_possible_if_blocks++;
1920 if (rtl_dump_file)
1922 if (else_bb)
1923 fprintf (rtl_dump_file,
1924 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
1925 test_bb->index, then_bb->index, else_bb->index,
1926 join_bb->index);
1927 else
1928 fprintf (rtl_dump_file,
1929 "\nIF-THEN block found, start %d, then %d, join %d\n",
1930 test_bb->index, then_bb->index, join_bb->index);
1933 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we
1934 get the first condition for free, since we've already asserted that
1935 there's a fallthru edge from IF to THEN. */
1936 /* ??? As an enhancement, move the ELSE block. Have to deal with
1937 BLOCK notes, if by no other means than aborting the merge if they
1938 exist. Sticky enough I don't want to think about it now. */
1939 next_index = then_bb->index;
1940 if (else_bb && ++next_index != else_bb->index)
1941 return FALSE;
1942 if (++next_index != join_bb->index)
1944 if (else_bb)
1945 join_bb = NULL;
1946 else
1947 return FALSE;
1950 /* Do the real work. */
1951 return process_if_block (test_bb, then_bb, else_bb, join_bb);
1954 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
1955 transformable, but not necessarily the other. There need be no
1956 JOIN block.
1958    Return TRUE if we were successful at converting the block.
1960 Cases we'd like to look at:
1963 if (test) goto over; // x not live
1964 x = a;
1965 goto label;
1966 over:
1968 becomes
1970 x = a;
1971 if (! test) goto label;
1974 if (test) goto E; // x not live
1975 x = big();
1976 goto L;
1978 x = b;
1979 goto M;
1981 becomes
1983 x = b;
1984 if (test) goto M;
1985 x = big();
1986 goto L;
1988 (3) // This one's really only interesting for targets that can do
1989 // multiway branching, e.g. IA-64 BBB bundles. For other targets
1990 // it results in multiple branches on a cache line, which often
1991 // does not sit well with predictors.
1993 if (test1) goto E; // predicted not taken
1994 x = a;
1995 if (test2) goto F;
1998 x = b;
2001 becomes
2003 x = a;
2004 if (test1) goto E;
2005 if (test2) goto F;
2007 Notes:
2009 (A) Don't do (2) if the branch is predicted against the block we're
2010 eliminating. Do it anyway if we can eliminate a branch; this requires
2011 that the sole successor of the eliminated block postdominate the other
2012 side of the if.
2014 (B) With CE, on (3) we can steal from both sides of the if, creating
2016 if (test1) x = a;
2017 if (!test1) x = b;
2018 if (test1) goto J;
2019 if (test2) goto F;
2023 Again, this is most useful if J postdominates.
2025 (C) CE substitutes for helpful life information.
2027 (D) These heuristics need a lot of work. */
2029 /* Tests for case 1 above. */
2031 static int
2032 find_if_case_1 (test_bb, then_edge, else_edge)
2033 basic_block test_bb;
2034 edge then_edge, else_edge;
2036 basic_block then_bb = then_edge->dest;
2037 basic_block else_bb = else_edge->dest;
2038 edge then_succ = then_bb->succ;
2039 rtx new_lab;
2041 /* THEN has one successor. */
2042 if (!then_succ || then_succ->succ_next != NULL)
2043 return FALSE;
2045 /* THEN does not fall through, but is not strange either. */
2046 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2047 return FALSE;
2049 /* THEN has one predecessor. */
2050 if (then_bb->pred->pred_next != NULL)
2051 return FALSE;
2053 /* ELSE follows THEN. (??? could be moved) */
2054 if (else_bb->index != then_bb->index + 1)
2055 return FALSE;
2057 num_possible_if_blocks++;
2058 if (rtl_dump_file)
2059 fprintf (rtl_dump_file,
2060 "\nIF-CASE-1 found, start %d, then %d\n",
2061 test_bb->index, then_bb->index);
2063 /* THEN is small. */
2064 if (count_bb_insns (then_bb) > BRANCH_COST)
2065 return FALSE;
2067 /* Find the label for THEN's destination. */
2068 if (then_succ->dest == EXIT_BLOCK_PTR)
2069 new_lab = NULL_RTX;
2070 else
2072 new_lab = JUMP_LABEL (then_bb->end);
2073 if (! new_lab)
2074 abort ();
2077 /* Registers set are dead, or are predicable. */
2078 if (! dead_or_predicable (test_bb, then_bb, else_bb, new_lab, 1))
2079 return FALSE;
2081 /* Conversion went ok, including moving the insns and fixing up the
2082 jump. Adjust the CFG to match. */
2084 SET_UPDATE_LIFE (test_bb);
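  /* TEST_BB now reaches both ELSE and THEN's old destination, so its
     live-at-end set must cover ELSE's live-at-start as well as what the
     removed THEN block had live at its end.  */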
2085 bitmap_operation (test_bb->global_live_at_end,
2086 else_bb->global_live_at_start,
2087 then_bb->global_live_at_end, BITMAP_IOR);
2089 make_edge (NULL, test_bb, then_succ->dest, 0);
2090 flow_delete_block (then_bb);
2091 tidy_fallthru_edge (else_edge, test_bb, else_bb);
2093 num_removed_blocks++;
2094 num_updated_if_blocks++;
2096 return TRUE;
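/* Sketch of the CFG surgery above, with invented block numbers: before,
   TEST 2 conditionally branched around THEN 3 into ELSE 4, and THEN 3
   ended in a jump to LABEL 7.  Afterwards THEN 3 is gone, its insns sit
   (as dead-register moves or predicated insns) ahead of the branch in
   TEST 2, and TEST 2 branches to LABEL 7 under the opposite condition
   while falling through directly into ELSE 4.  */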
2099 /* Test for case 2 above. */
2101 static int
2102 find_if_case_2 (test_bb, then_edge, else_edge)
2103 basic_block test_bb;
2104 edge then_edge, else_edge;
2106 basic_block then_bb = then_edge->dest;
2107 basic_block else_bb = else_edge->dest;
2108 edge else_succ = else_bb->succ;
2109 rtx new_lab, note;
2111 /* ELSE has one successor. */
2112 if (!else_succ || else_succ->succ_next != NULL)
2113 return FALSE;
2115 /* ELSE outgoing edge is not complex. */
2116 if (else_succ->flags & EDGE_COMPLEX)
2117 return FALSE;
2119 /* ELSE has one predecessor. */
2120 if (else_bb->pred->pred_next != NULL)
2121 return FALSE;
2123 /* THEN is not EXIT. */
2124 if (then_bb->index < 0)
2125 return FALSE;
2127 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2128 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2129 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2131 else if (else_succ->dest->index < 0
2132 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
2133 ORIG_INDEX (else_succ->dest)))
2135 else
2136 return FALSE;
2138 num_possible_if_blocks++;
2139 if (rtl_dump_file)
2140 fprintf (rtl_dump_file,
2141 "\nIF-CASE-2 found, start %d, else %d\n",
2142 test_bb->index, else_bb->index);
2144 /* ELSE is small. */
2145   if (count_bb_insns (else_bb) > BRANCH_COST)
2146 return FALSE;
2148 /* Find the label for ELSE's destination. */
2149 if (else_succ->dest == EXIT_BLOCK_PTR)
2150 new_lab = NULL_RTX;
2151 else
2153 if (else_succ->flags & EDGE_FALLTHRU)
2155 new_lab = else_succ->dest->head;
2156 if (GET_CODE (new_lab) != CODE_LABEL)
2157 abort ();
2159 else
2161 new_lab = JUMP_LABEL (else_bb->end);
2162 if (! new_lab)
2163 abort ();
2167 /* Registers set are dead, or are predicable. */
2168 if (! dead_or_predicable (test_bb, else_bb, then_bb, new_lab, 0))
2169 return FALSE;
2171 /* Conversion went ok, including moving the insns and fixing up the
2172 jump. Adjust the CFG to match. */
2174 SET_UPDATE_LIFE (test_bb);
2175 bitmap_operation (test_bb->global_live_at_end,
2176 then_bb->global_live_at_start,
2177 else_bb->global_live_at_end, BITMAP_IOR);
2179 remove_edge (else_edge);
2180 make_edge (NULL, test_bb, else_succ->dest, 0);
2181 flow_delete_block (else_bb);
2183 num_removed_blocks++;
2184 num_updated_if_blocks++;
2186 /* ??? We may now fallthru from one of THEN's successors into a join
2187 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2189 return TRUE;
2192 /* A subroutine of dead_or_predicable called through for_each_rtx.
2193 Return 1 if a memory is found. */
2195 static int
2196 find_memory (px, data)
2197 rtx *px;
2198 void *data ATTRIBUTE_UNUSED;
2200 return GET_CODE (*px) == MEM;
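/* Usage note: for_each_rtx stops walking and propagates the nonzero
   return as soon as find_memory sees a MEM, so the call
   "for_each_rtx (&PATTERN (insn), find_memory, NULL)" below simply
   answers "does this insn mention memory anywhere in its pattern".  */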
2203 /* Used by the code above to perform the actual rtl transformations.
2204 Return TRUE if successful.
2206    TEST_BB is the block containing the conditional branch.  MERGE_BB
2207    is the block containing the code to manipulate.  OTHER_BB is the
2208    block on the other side of the conditional; in the non-predicated
2209    case, registers set in MERGE_BB must not be live at its start.
   NEW_DEST is the label TEST_BB should be branching to after the
   conversion.  REVERSEP is true if the sense of the branch should be
   reversed.  */
2211 static int
2212 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2213 basic_block test_bb, merge_bb, other_bb;
2214 rtx new_dest;
2215 int reversep;
2217 rtx head, end, jump, earliest, old_dest;
2219 jump = test_bb->end;
2221 /* Find the extent of the real code in the merge block. */
2222 head = merge_bb->head;
2223 end = merge_bb->end;
2225 if (GET_CODE (head) == CODE_LABEL)
2226 head = NEXT_INSN (head);
2227 if (GET_CODE (head) == NOTE)
2229 if (head == end)
2231 head = end = NULL_RTX;
2232 goto no_body;
2234 head = NEXT_INSN (head);
2237 if (GET_CODE (end) == JUMP_INSN)
2239 if (head == end)
2241 head = end = NULL_RTX;
2242 goto no_body;
2244 end = PREV_INSN (end);
2247 /* Disable handling dead code by conditional execution if the machine needs
2248 to do anything funny with the tests, etc. */
2249 #ifndef IFCVT_MODIFY_TESTS
2250 if (HAVE_conditional_execution)
2252 /* In the conditional execution case, we have things easy. We know
2253    the condition is reversible.  We don't have to check life info,
2254    because we're going to conditionally execute the code anyway.
2255 All that's left is making sure the insns involved can actually
2256 be predicated. */
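      /* As an illustration only (generic RTL, not any particular
	 target's output): predicating a move such as "x = a" wraps its
	 pattern as (cond_exec (condition) (set (reg x) (reg a)));
	 cond_exec_process_insns does that wrapping below, and recog then
	 rejects any insn the target cannot execute conditionally.  */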
2258 rtx cond, prob_val;
2260 cond = cond_exec_get_condition (jump);
2262 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2263 if (prob_val)
2264 prob_val = XEXP (prob_val, 0);
2266 if (reversep)
2268 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2269 GET_MODE (cond), XEXP (cond, 0),
2270 XEXP (cond, 1));
2271 if (prob_val)
2272 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2275 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
2276 goto cancel;
2278 earliest = jump;
2280 else
2281 #endif
2283 /* In the non-conditional execution case, we have to verify that there
2284 are no trapping operations, no calls, no references to memory, and
2285 that any registers modified are dead at the branch site. */
2287 rtx insn, cond, prev;
2288 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2289 regset merge_set, tmp, test_live, test_set;
2290 struct propagate_block_info *pbi;
2291 int i, fail = 0;
2293 /* Check for no calls or trapping operations. */
2294 for (insn = head; ; insn = NEXT_INSN (insn))
2296 if (GET_CODE (insn) == CALL_INSN)
2297 return FALSE;
2298 if (INSN_P (insn))
2300 if (may_trap_p (PATTERN (insn)))
2301 return FALSE;
2303 /* ??? Even non-trapping memories such as stack frame
2304 references must be avoided. For stores, we collect
2305    no lifetime info; for reads, we'd have to show that
2306    true_dependence is false against every store in the
2307    TEST range.  */
2308 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2309 return FALSE;
2311 if (insn == end)
2312 break;
2315 if (! any_condjump_p (jump))
2316 return FALSE;
2318 /* Find the extent of the conditional. */
2319 cond = noce_get_condition (jump, &earliest);
2320 if (! cond)
2321 return FALSE;
2323 /* Collect:
2324 MERGE_SET = set of registers set in MERGE_BB
2325 TEST_LIVE = set of registers live at EARLIEST
2326 TEST_SET = set of registers set between EARLIEST and the
2327 end of the block. */
2329 tmp = INITIALIZE_REG_SET (tmp_head);
2330 merge_set = INITIALIZE_REG_SET (merge_set_head);
2331 test_live = INITIALIZE_REG_SET (test_live_head);
2332 test_set = INITIALIZE_REG_SET (test_set_head);
2334 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2335 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2336 since we've already asserted that MERGE_BB is small. */
2337 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2339 /* For small register class machines, don't lengthen lifetimes of
2340 hard registers before reload. */
2341 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2343 EXECUTE_IF_SET_IN_BITMAP
2344 (merge_set, 0, i,
2346 if (i < FIRST_PSEUDO_REGISTER
2347 && ! fixed_regs[i]
2348 && ! global_regs[i])
2349 fail = 1;
2353 /* For TEST, we're interested in a range of insns, not a whole block.
2354    Moreover, we're interested in the registers live from OTHER_BB.  */
2356 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2357 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2360 for (insn = jump; ; insn = prev)
2362 prev = propagate_one_insn (pbi, insn);
2363 if (insn == earliest)
2364 break;
2367 free_propagate_block_info (pbi);
2369 /* We can perform the transformation if
2370 	MERGE_SET & (TEST_SET | TEST_LIVE)
2371      and
2372 	TEST_SET & merge_bb->global_live_at_start
2373 are empty. */
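      /* Worked example with invented register numbers: if the TEST range
	 computes "r20 = (r10 < r11)" before the jump and MERGE_BB is
	 "r10 = 0", then MERGE_SET = {r10} while TEST_LIVE contains r10 and
	 r11, so the first intersection below is nonempty -- hoisting
	 "r10 = 0" above the comparison would change the condition, and we
	 must give up.  */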
2375 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2376 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2377 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2379 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2380 BITMAP_AND);
2381 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2383 FREE_REG_SET (tmp);
2384 FREE_REG_SET (merge_set);
2385 FREE_REG_SET (test_live);
2386 FREE_REG_SET (test_set);
2388 if (fail)
2389 return FALSE;
2392 no_body:
2393   /* We don't want to use normal invert_jump or redirect_jump because
2394      we don't want delete_insn called.  Also, we want to do our own
2395 change group management. */
2397 old_dest = JUMP_LABEL (jump);
2398 if (reversep
2399 ? ! invert_jump_1 (jump, new_dest)
2400 : ! redirect_jump_1 (jump, new_dest))
2401 goto cancel;
2403 if (! apply_change_group ())
2404 return FALSE;
2406 if (old_dest)
2407 LABEL_NUSES (old_dest) -= 1;
2408 if (new_dest)
2409 LABEL_NUSES (new_dest) += 1;
2410 JUMP_LABEL (jump) = new_dest;
2412 if (reversep)
2414 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2415 if (note)
2416 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
2419 /* Move the insns out of MERGE_BB to before the branch. */
2420 if (head != NULL)
2422 if (end == merge_bb->end)
2423 merge_bb->end = PREV_INSN (head);
2425 head = squeeze_notes (head, end);
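      /* If the last insn of the range is a left-over BLOCK or LOOP note,
	 leave it where it is rather than dragging it in front of the
	 branch; only the insns before it are moved.  */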
2426 if (GET_CODE (end) == NOTE
2427 && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
2428 || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
2429 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
2430 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
2431 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
2432 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
2434 if (head == end)
2435 return TRUE;
2436 end = PREV_INSN (end);
2439 reorder_insns (head, end, PREV_INSN (earliest));
2441 return TRUE;
2443 cancel:
2444 cancel_changes (0);
2445 return FALSE;
2448 /* Main entry point for all if-conversion. */
2450 void
2451 if_convert (life_data_ok)
2452 int life_data_ok;
2454 int block_num;
2456 num_possible_if_blocks = 0;
2457 num_updated_if_blocks = 0;
2458 num_removed_blocks = 0;
2460 /* Free up basic_block_for_insn so that we don't have to keep it
2461 up to date, either here or in merge_blocks_nomove. */
2462 free_basic_block_vars (1);
2464 /* Compute postdominators if we think we'll use them. */
2465 post_dominators = NULL;
2466 if (HAVE_conditional_execution || life_data_ok)
2468 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2469 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2472 /* Record initial block numbers. */
2473 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2474 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2476 /* Go through each of the basic blocks looking for things to convert. */
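/* When find_if_header succeeds, blocks may have been merged and
   renumbered, so the loop below reloads BB's (possibly changed) index
   and examines the same position again: collapsing one if-block can
   expose another convertible one in its place.  */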
2477 for (block_num = 0; block_num < n_basic_blocks; )
2479 basic_block bb = BASIC_BLOCK (block_num);
2480 if (find_if_header (bb))
2481 block_num = bb->index;
2482 else
2483 block_num++;
2486 if (post_dominators)
2487 sbitmap_vector_free (post_dominators);
2489 if (rtl_dump_file)
2490 fflush (rtl_dump_file);
2492 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2493 compute_bb_for_insn (get_max_uid ());
2495 /* Rebuild life info for basic blocks that require it. */
2496 if (num_removed_blocks && life_data_ok)
2498 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2499 sbitmap_zero (update_life_blocks);
2501 /* If we allocated new pseudos, we must resize the array for sched1. */
2502 if (max_regno < max_reg_num ())
2504 max_regno = max_reg_num ();
2505 allocate_reg_info (max_regno, FALSE, FALSE);
2508 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2509 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2510 SET_BIT (update_life_blocks, block_num);
2512 count_or_remove_death_notes (update_life_blocks, 1);
2513       /* ??? See about adding a mode that verifies that the initial
2514 	 set of blocks doesn't let registers come live.  */
2515 update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
2516 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2517 | PROP_KILL_DEAD_CODE);
2519 sbitmap_free (update_life_blocks);
2522 /* Write the final stats. */
2523 if (rtl_dump_file && num_possible_if_blocks > 0)
2525 fprintf (rtl_dump_file,
2526 "\n%d possible IF blocks searched.\n",
2527 num_possible_if_blocks);
2528 fprintf (rtl_dump_file,
2529 "%d IF blocks converted.\n",
2530 num_updated_if_blocks);
2531 fprintf (rtl_dump_file,
2532 "%d basic blocks deleted.\n\n\n",
2533 num_removed_blocks);
2536 #ifdef ENABLE_CHECKING
2537 verify_flow_info ();
2538 #endif