[official-gcc.git] / gcc / ifcvt.c
1 /* If-conversion support.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
24 #include "rtl.h"
25 #include "regs.h"
26 #include "function.h"
27 #include "flags.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "expr.h"
33 #include "real.h"
34 #include "output.h"
35 #include "tm_p.h"
38 #ifndef HAVE_conditional_execution
39 #define HAVE_conditional_execution 0
40 #endif
41 #ifndef HAVE_conditional_move
42 #define HAVE_conditional_move 0
43 #endif
44 #ifndef HAVE_incscc
45 #define HAVE_incscc 0
46 #endif
47 #ifndef HAVE_decscc
48 #define HAVE_decscc 0
49 #endif
51 #ifndef MAX_CONDITIONAL_EXECUTE
52 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
53 #endif
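/* For example, with this default and a BRANCH_COST of 2, at most three
   insns in a lone THEN block are considered for conversion; an
   IF-THEN-ELSE pair gets twice that budget in total (see the use of
   2 * MAX_CONDITIONAL_EXECUTE in cond_exec_process_if_block below).  */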
55 #define NULL_EDGE ((struct edge_def *)NULL)
56 #define NULL_BLOCK ((struct basic_block_def *)NULL)
58 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
59 static int num_possible_if_blocks;
 61 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
62 execution. */
63 static int num_updated_if_blocks;
65 /* # of basic blocks that were removed. */
66 static int num_removed_blocks;
68 /* The post-dominator relation on the original block numbers. */
69 static sbitmap *post_dominators;
71 /* Forward references. */
72 static int count_bb_insns PARAMS ((basic_block));
73 static rtx first_active_insn PARAMS ((basic_block));
74 static int last_active_insn_p PARAMS ((basic_block, rtx));
75 static int seq_contains_jump PARAMS ((rtx));
77 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
78 static rtx cond_exec_get_condition PARAMS ((rtx));
79 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
80 basic_block, basic_block));
82 static rtx noce_get_condition PARAMS ((rtx, rtx *));
83 static int noce_operand_ok PARAMS ((rtx));
84 static int noce_process_if_block PARAMS ((basic_block, basic_block,
85 basic_block, basic_block));
87 static int process_if_block PARAMS ((basic_block, basic_block,
88 basic_block, basic_block));
89 static void merge_if_block PARAMS ((basic_block, basic_block,
90 basic_block, basic_block));
92 static int find_if_header PARAMS ((basic_block));
93 static int find_if_block PARAMS ((basic_block, edge, edge));
94 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
95 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
96 static int find_memory PARAMS ((rtx *, void *));
97 static int dead_or_predicable PARAMS ((basic_block, basic_block,
98 basic_block, rtx, int));
99 static void noce_emit_move_insn PARAMS ((rtx, rtx));
101 /* Abuse the basic_block AUX field to store the original block index,
 102 as well as a flag indicating that the block should be rescanned for
103 life analysis. */
105 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
106 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
107 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
108 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
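/* A worked example of the packing above (block index 5 chosen purely
   for illustration): SET_ORIG_INDEX (bb, 5) stores 5 << 1 == 10 in
   bb->aux; SET_UPDATE_LIFE (bb) then ORs in the low bit, giving 11;
   ORIG_INDEX (bb) shifts back down and still recovers 5, while
   UPDATE_LIFE (bb) reads the low bit as 1.  */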
111 /* Count the number of non-jump active insns in BB. */
113 static int
114 count_bb_insns (bb)
115 basic_block bb;
117 int count = 0;
118 rtx insn = bb->head;
120 while (1)
122 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
123 count++;
125 if (insn == bb->end)
126 break;
127 insn = NEXT_INSN (insn);
130 return count;
133 /* Return the first non-jump active insn in the basic block. */
135 static rtx
136 first_active_insn (bb)
137 basic_block bb;
139 rtx insn = bb->head;
141 if (GET_CODE (insn) == CODE_LABEL)
143 if (insn == bb->end)
144 return NULL_RTX;
145 insn = NEXT_INSN (insn);
148 while (GET_CODE (insn) == NOTE)
150 if (insn == bb->end)
151 return NULL_RTX;
152 insn = NEXT_INSN (insn);
155 if (GET_CODE (insn) == JUMP_INSN)
156 return NULL_RTX;
158 return insn;
161 /* Return true if INSN is the last active non-jump insn in BB. */
163 static int
164 last_active_insn_p (bb, insn)
165 basic_block bb;
166 rtx insn;
170 if (insn == bb->end)
171 return TRUE;
172 insn = NEXT_INSN (insn);
174 while (GET_CODE (insn) == NOTE);
176 return GET_CODE (insn) == JUMP_INSN;
 179 /* It is possible, especially after dealing with multi-word
180 arithmetic, for the expanders to have emitted jumps. Search
181 through the sequence and return TRUE if a jump exists so that
182 we can abort the conversion. */
184 static int
185 seq_contains_jump (insn)
186 rtx insn;
188 while (insn)
190 if (GET_CODE (insn) == JUMP_INSN)
191 return 1;
192 insn = NEXT_INSN (insn);
194 return 0;
197 /* Go through a bunch of insns, converting them to conditional
198 execution format if possible. Return TRUE if all of the non-note
199 insns were processed. */
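/* As a rough sketch (register numbers invented for illustration), an
   unconditional insn whose pattern is
     (set (reg:SI 100) (reg:SI 101))
   is rewritten via validate_change into
     (cond_exec <test> (set (reg:SI 100) (reg:SI 101)))
   where <test> is a copy of the TEST rtx supplied by the caller.  */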
201 static int
202 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
203 rtx start; /* first insn to look at */
204 rtx end; /* last insn to look at */
205 rtx test; /* conditional execution test */
206 rtx prob_val; /* probability of branch taken. */
 207 int mod_ok; /* true if modifications are ok in the last insn. */
209 int must_be_last = FALSE;
210 rtx insn;
211 rtx pattern;
213 for (insn = start; ; insn = NEXT_INSN (insn))
215 if (GET_CODE (insn) == NOTE)
216 goto insn_done;
218 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
219 abort ();
221 /* Remove USE insns that get in the way. */
222 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
224 /* ??? Ug. Actually unlinking the thing is problematic,
225 given what we'd have to coordinate with our callers. */
226 PUT_CODE (insn, NOTE);
227 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
228 NOTE_SOURCE_FILE (insn) = 0;
229 goto insn_done;
232 /* Last insn wasn't last? */
233 if (must_be_last)
234 return FALSE;
236 if (modified_in_p (test, insn))
238 if (!mod_ok)
239 return FALSE;
240 must_be_last = TRUE;
243 /* Now build the conditional form of the instruction. */
244 pattern = PATTERN (insn);
246 /* If the machine needs to modify the insn being conditionally executed,
247 say for example to force a constant integer operand into a temp
248 register, do so here. */
249 #ifdef IFCVT_MODIFY_INSN
250 IFCVT_MODIFY_INSN (pattern, insn);
251 if (! pattern)
252 return FALSE;
253 #endif
255 validate_change (insn, &PATTERN (insn),
256 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
257 pattern), 1);
259 if (GET_CODE (insn) == CALL_INSN && prob_val)
260 validate_change (insn, &REG_NOTES (insn),
261 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
262 REG_NOTES (insn)), 1);
264 insn_done:
265 if (insn == end)
266 break;
269 return TRUE;
272 /* Return the condition for a jump. Do not do any special processing. */
274 static rtx
275 cond_exec_get_condition (jump)
276 rtx jump;
278 rtx test_if, cond;
280 if (any_condjump_p (jump))
281 test_if = SET_SRC (pc_set (jump));
282 else
283 return NULL_RTX;
284 cond = XEXP (test_if, 0);
286 /* If this branches to JUMP_LABEL when the condition is false,
287 reverse the condition. */
288 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
289 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
290 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
291 GET_MODE (cond), XEXP (cond, 0),
292 XEXP (cond, 1));
294 return cond;
297 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
298 to conditional execution. Return TRUE if we were successful at
 299 converting the block. */
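/* Concretely, the insns of the THEN block are wrapped in COND_EXEC
   under the condition that the branch ending TEST_BB is not taken,
   and the ELSE insns under the condition that it is taken; this is
   what the false_expr/true_expr variables below encode.  */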
301 static int
302 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
303 basic_block test_bb; /* Basic block test is in */
304 basic_block then_bb; /* Basic block for THEN block */
305 basic_block else_bb; /* Basic block for ELSE block */
306 basic_block join_bb; /* Basic block the join label is in */
308 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
309 rtx then_start; /* first insn in THEN block */
310 rtx then_end; /* last insn + 1 in THEN block */
311 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
312 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
313 int max; /* max # of insns to convert. */
314 int then_mod_ok; /* whether conditional mods are ok in THEN */
315 rtx true_expr; /* test for else block insns */
316 rtx false_expr; /* test for then block insns */
317 rtx true_prob_val; /* probability of else block */
318 rtx false_prob_val; /* probability of then block */
319 int n_insns;
321 /* Find the conditional jump to the ELSE or JOIN part, and isolate
322 the test. */
323 test_expr = cond_exec_get_condition (test_bb->end);
324 if (! test_expr)
325 return FALSE;
327 /* If the conditional jump is more than just a conditional jump,
328 then we can not do conditional execution conversion on this block. */
329 if (!onlyjump_p (test_bb->end))
330 return FALSE;
332 /* Collect the bounds of where we're to search. */
334 then_start = then_bb->head;
335 then_end = then_bb->end;
337 /* Skip a label heading THEN block. */
338 if (GET_CODE (then_start) == CODE_LABEL)
339 then_start = NEXT_INSN (then_start);
341 /* Skip a (use (const_int 0)) or branch as the final insn. */
342 if (GET_CODE (then_end) == INSN
343 && GET_CODE (PATTERN (then_end)) == USE
344 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
345 then_end = PREV_INSN (then_end);
346 else if (GET_CODE (then_end) == JUMP_INSN)
347 then_end = PREV_INSN (then_end);
349 if (else_bb)
351 /* Skip the ELSE block's label. */
352 else_start = NEXT_INSN (else_bb->head);
353 else_end = else_bb->end;
355 /* Skip a (use (const_int 0)) or branch as the final insn. */
356 if (GET_CODE (else_end) == INSN
357 && GET_CODE (PATTERN (else_end)) == USE
358 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
359 else_end = PREV_INSN (else_end);
360 else if (GET_CODE (else_end) == JUMP_INSN)
361 else_end = PREV_INSN (else_end);
364 /* How many instructions should we convert in total? */
365 n_insns = 0;
366 if (else_bb)
368 max = 2 * MAX_CONDITIONAL_EXECUTE;
369 n_insns = count_bb_insns (else_bb);
371 else
372 max = MAX_CONDITIONAL_EXECUTE;
373 n_insns += count_bb_insns (then_bb);
374 if (n_insns > max)
375 return FALSE;
377 /* Map test_expr/test_jump into the appropriate MD tests to use on
378 the conditionally executed code. */
380 true_expr = test_expr;
381 false_expr = gen_rtx_fmt_ee (reverse_condition (GET_CODE (true_expr)),
382 GET_MODE (true_expr), XEXP (true_expr, 0),
383 XEXP (true_expr, 1));
385 #ifdef IFCVT_MODIFY_TESTS
386 /* If the machine description needs to modify the tests, such as setting a
387 conditional execution register from a comparison, it can do so here. */
388 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
389 join_bb);
391 /* See if the conversion failed */
392 if (!true_expr || !false_expr)
393 goto fail;
394 #endif
396 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
397 if (true_prob_val)
399 true_prob_val = XEXP (true_prob_val, 0);
400 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
402 else
403 false_prob_val = NULL_RTX;
405 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
 406 in the THEN block. */
407 then_mod_ok = (else_bb == NULL_BLOCK);
409 /* Go through the THEN and ELSE blocks converting the insns if possible
410 to conditional execution. */
412 if (then_end
413 && ! cond_exec_process_insns (then_start, then_end,
414 false_expr, false_prob_val, then_mod_ok))
415 goto fail;
417 if (else_bb
418 && ! cond_exec_process_insns (else_start, else_end,
419 true_expr, true_prob_val, TRUE))
420 goto fail;
422 if (! apply_change_group ())
423 return FALSE;
425 #ifdef IFCVT_MODIFY_FINAL
426 /* Do any machine dependent final modifications */
427 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
428 #endif
430 /* Conversion succeeded. */
431 if (rtl_dump_file)
432 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
433 n_insns, (n_insns == 1) ? " was" : "s were");
435 /* Merge the blocks! */
436 merge_if_block (test_bb, then_bb, else_bb, join_bb);
437 return TRUE;
439 fail:
440 #ifdef IFCVT_MODIFY_CANCEL
441 /* Cancel any machine dependent changes. */
442 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
443 #endif
445 cancel_changes (0);
446 return FALSE;
449 /* Used by noce_process_if_block to communicate with its subroutines.
451 The subroutines know that A and B may be evaluated freely. They
452 know that X is a register. They should insert new instructions
453 before cond_earliest. */
455 struct noce_if_info
457 basic_block test_bb;
458 rtx insn_a, insn_b;
459 rtx x, a, b;
460 rtx jump, cond, cond_earliest;
463 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
464 rtx, int, int));
465 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
466 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
467 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
468 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
469 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
470 rtx, enum rtx_code, rtx,
471 rtx, rtx, rtx));
472 static int noce_try_cmove PARAMS ((struct noce_if_info *));
473 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
474 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
475 rtx, rtx *));
476 static int noce_try_minmax PARAMS ((struct noce_if_info *));
477 static int noce_try_abs PARAMS ((struct noce_if_info *));
479 /* Helper function for noce_try_store_flag*. */
481 static rtx
482 noce_emit_store_flag (if_info, x, reversep, normalize)
483 struct noce_if_info *if_info;
484 rtx x;
485 int reversep, normalize;
487 rtx cond = if_info->cond;
488 int cond_complex;
489 enum rtx_code code;
491 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
492 || ! general_operand (XEXP (cond, 1), VOIDmode));
494 /* If earliest == jump, or when the condition is complex, try to
495 build the store_flag insn directly. */
497 if (cond_complex)
498 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
500 if (reversep)
501 code = reversed_comparison_code (cond, if_info->jump);
502 else
503 code = GET_CODE (cond);
505 if ((if_info->cond_earliest == if_info->jump || cond_complex)
506 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
508 rtx tmp;
510 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
511 XEXP (cond, 1));
512 tmp = gen_rtx_SET (VOIDmode, x, tmp);
514 start_sequence ();
515 tmp = emit_insn (tmp);
517 if (recog_memoized (tmp) >= 0)
519 tmp = get_insns ();
520 end_sequence ();
521 emit_insns (tmp);
523 if_info->cond_earliest = if_info->jump;
525 return x;
528 end_sequence ();
531 /* Don't even try if the comparison operands are weird. */
532 if (cond_complex)
533 return NULL_RTX;
535 return emit_store_flag (x, code, XEXP (cond, 0),
536 XEXP (cond, 1), VOIDmode,
537 (code == LTU || code == LEU
538 || code == GEU || code == GTU), normalize);
541 /* Emit instruction to move a rtx into STRICT_LOW_PART. */
542 static void
543 noce_emit_move_insn (x, y)
544 rtx x, y;
546 enum machine_mode outmode, inmode;
547 rtx outer, inner;
548 int bitpos;
550 if (GET_CODE (x) != STRICT_LOW_PART)
552 emit_move_insn (x, y);
553 return;
556 outer = XEXP (x, 0);
557 inner = XEXP (outer, 0);
558 outmode = GET_MODE (outer);
559 inmode = GET_MODE (inner);
560 bitpos = SUBREG_WORD (outer) * BITS_PER_WORD;
561 if (BYTES_BIG_ENDIAN)
562 bitpos += (GET_MODE_BITSIZE (inmode) - GET_MODE_BITSIZE (outmode))
563 % BITS_PER_WORD;
564 store_bit_field (inner, GET_MODE_BITSIZE (outmode),
565 bitpos, outmode, y, GET_MODE_BITSIZE (inmode),
566 GET_MODE_BITSIZE (inmode));
569 /* Convert "if (test) x = 1; else x = 0".
571 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
572 tried in noce_try_store_flag_constants after noce_try_cmove has had
573 a go at the conversion. */
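/* For example, on a target where STORE_FLAG_VALUE is 1, a fragment
   such as "if (a < b) x = 1; else x = 0;" (variable names invented)
   collapses into the single store-flag operation "x = (a < b);".  */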
575 static int
576 noce_try_store_flag (if_info)
577 struct noce_if_info *if_info;
579 int reversep;
580 rtx target, seq;
582 if (GET_CODE (if_info->b) == CONST_INT
583 && INTVAL (if_info->b) == STORE_FLAG_VALUE
584 && if_info->a == const0_rtx)
585 reversep = 0;
586 else if (if_info->b == const0_rtx
587 && GET_CODE (if_info->a) == CONST_INT
588 && INTVAL (if_info->a) == STORE_FLAG_VALUE
589 && (reversed_comparison_code (if_info->cond, if_info->jump)
590 != UNKNOWN))
591 reversep = 1;
592 else
593 return FALSE;
595 start_sequence ();
597 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
598 if (target)
600 if (target != if_info->x)
601 noce_emit_move_insn (if_info->x, target);
603 seq = get_insns ();
604 end_sequence ();
605 emit_insns_before (seq, if_info->cond_earliest);
607 return TRUE;
609 else
611 end_sequence ();
612 return FALSE;
616 /* Convert "if (test) x = a; else x = b", for A and B constant. */
618 static int
619 noce_try_store_flag_constants (if_info)
620 struct noce_if_info *if_info;
622 rtx target, seq;
623 int reversep;
624 HOST_WIDE_INT itrue, ifalse, diff, tmp;
625 int normalize, can_reverse;
627 if (! no_new_pseudos
628 && GET_CODE (if_info->a) == CONST_INT
629 && GET_CODE (if_info->b) == CONST_INT)
631 ifalse = INTVAL (if_info->a);
632 itrue = INTVAL (if_info->b);
633 diff = itrue - ifalse;
635 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
636 != UNKNOWN);
638 reversep = 0;
639 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
640 normalize = 0;
641 else if (ifalse == 0 && exact_log2 (itrue) >= 0
642 && (STORE_FLAG_VALUE == 1
643 || BRANCH_COST >= 2))
644 normalize = 1;
645 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
646 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
647 normalize = 1, reversep = 1;
648 else if (itrue == -1
649 && (STORE_FLAG_VALUE == -1
650 || BRANCH_COST >= 2))
651 normalize = -1;
652 else if (ifalse == -1 && can_reverse
653 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
654 normalize = -1, reversep = 1;
655 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
656 || BRANCH_COST >= 3)
657 normalize = -1;
658 else
659 return FALSE;
661 if (reversep)
663 tmp = itrue; itrue = ifalse; ifalse = tmp;
664 diff = -diff;
667 start_sequence ();
668 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
669 if (! target)
671 end_sequence ();
672 return FALSE;
675 /* if (test) x = 3; else x = 4;
676 => x = 3 + (test == 0); */
677 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
679 target = expand_binop (GET_MODE (if_info->x),
680 (diff == STORE_FLAG_VALUE
681 ? add_optab : sub_optab),
682 GEN_INT (ifalse), target, if_info->x, 0,
683 OPTAB_WIDEN);
686 /* if (test) x = 8; else x = 0;
687 => x = (test != 0) << 3; */
688 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
690 target = expand_binop (GET_MODE (if_info->x), ashl_optab,
691 target, GEN_INT (tmp), if_info->x, 0,
692 OPTAB_WIDEN);
695 /* if (test) x = -1; else x = b;
696 => x = -(test != 0) | b; */
697 else if (itrue == -1)
699 target = expand_binop (GET_MODE (if_info->x), ior_optab,
700 target, GEN_INT (ifalse), if_info->x, 0,
701 OPTAB_WIDEN);
704 /* if (test) x = a; else x = b;
705 => x = (-(test != 0) & (b - a)) + a; */
706 else
708 target = expand_binop (GET_MODE (if_info->x), and_optab,
709 target, GEN_INT (diff), if_info->x, 0,
710 OPTAB_WIDEN);
711 if (target)
712 target = expand_binop (GET_MODE (if_info->x), add_optab,
713 target, GEN_INT (ifalse), if_info->x, 0,
714 OPTAB_WIDEN);
717 if (! target)
719 end_sequence ();
720 return FALSE;
723 if (target != if_info->x)
724 noce_emit_move_insn (if_info->x, target);
726 seq = get_insns ();
727 end_sequence ();
729 if (seq_contains_jump (seq))
730 return FALSE;
732 emit_insns_before (seq, if_info->cond_earliest);
734 return TRUE;
737 return FALSE;
740 /* Convert "if (test) foo++" into "foo += (test != 0)", and
741 similarly for "foo--". */
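/* E.g. "if (a == b) foo++;" (names invented) can become
   "foo += (a == b);", emitted below as a store-flag followed by an
   add or subtract; the HAVE_incscc/HAVE_decscc checks merely gate
   whether the transformation is judged cheap enough.  */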
743 static int
744 noce_try_store_flag_inc (if_info)
745 struct noce_if_info *if_info;
747 rtx target, seq;
748 int subtract, normalize;
750 if (! no_new_pseudos
751 && (BRANCH_COST >= 2
752 || HAVE_incscc
753 || HAVE_decscc)
754 /* Should be no `else' case to worry about. */
755 && if_info->b == if_info->x
756 && GET_CODE (if_info->a) == PLUS
757 && (XEXP (if_info->a, 1) == const1_rtx
758 || XEXP (if_info->a, 1) == constm1_rtx)
759 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
760 && (reversed_comparison_code (if_info->cond, if_info->jump)
761 != UNKNOWN))
763 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
764 subtract = 0, normalize = 0;
765 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
766 subtract = 1, normalize = 0;
767 else
768 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
770 start_sequence ();
772 target = noce_emit_store_flag (if_info,
773 gen_reg_rtx (GET_MODE (if_info->x)),
774 1, normalize);
776 if (target)
777 target = expand_binop (GET_MODE (if_info->x),
778 subtract ? sub_optab : add_optab,
779 if_info->x, target, if_info->x, 0, OPTAB_WIDEN);
780 if (target)
782 if (target != if_info->x)
783 noce_emit_move_insn (if_info->x, target);
785 seq = get_insns ();
786 end_sequence ();
788 if (seq_contains_jump (seq))
789 return FALSE;
791 emit_insns_before (seq, if_info->cond_earliest);
793 return TRUE;
796 end_sequence ();
799 return FALSE;
802 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
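/* E.g. "if (a != b) x = 0;" (names invented) becomes
   "x &= -(a == b);": the mask is all ones exactly when the store
   would have been skipped, so the AND either clears x or leaves it
   untouched.  */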
804 static int
805 noce_try_store_flag_mask (if_info)
806 struct noce_if_info *if_info;
808 rtx target, seq;
809 int reversep;
811 reversep = 0;
812 if (! no_new_pseudos
813 && (BRANCH_COST >= 2
814 || STORE_FLAG_VALUE == -1)
815 && ((if_info->a == const0_rtx
816 && rtx_equal_p (if_info->b, if_info->x))
817 || ((reversep = (reversed_comparison_code (if_info->cond,
818 if_info->jump)
819 != UNKNOWN))
820 && if_info->b == const0_rtx
821 && rtx_equal_p (if_info->a, if_info->x))))
823 start_sequence ();
824 target = noce_emit_store_flag (if_info,
825 gen_reg_rtx (GET_MODE (if_info->x)),
826 reversep, -1);
827 if (target)
828 target = expand_binop (GET_MODE (if_info->x), and_optab,
829 if_info->x, target, if_info->x, 0,
830 OPTAB_WIDEN);
832 if (target)
834 if (target != if_info->x)
835 noce_emit_move_insn (if_info->x, target);
837 seq = get_insns ();
838 end_sequence ();
840 if (seq_contains_jump (seq))
841 return FALSE;
843 emit_insns_before (seq, if_info->cond_earliest);
845 return TRUE;
848 end_sequence ();
851 return FALSE;
854 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
856 static rtx
857 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
858 struct noce_if_info *if_info;
859 rtx x, cmp_a, cmp_b, vfalse, vtrue;
860 enum rtx_code code;
862 /* If earliest == jump, try to build the cmove insn directly.
863 This is helpful when combine has created some complex condition
864 (like for alpha's cmovlbs) that we can't hope to regenerate
865 through the normal interface. */
867 if (if_info->cond_earliest == if_info->jump)
869 rtx tmp;
871 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
872 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
873 tmp = gen_rtx_SET (VOIDmode, x, tmp);
875 start_sequence ();
876 tmp = emit_insn (tmp);
878 if (recog_memoized (tmp) >= 0)
880 tmp = get_insns ();
881 end_sequence ();
882 emit_insns (tmp);
884 return x;
887 end_sequence ();
890 /* Don't even try if the comparison operands are weird. */
891 if (! general_operand (cmp_a, GET_MODE (cmp_a))
892 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
893 return NULL_RTX;
895 #if HAVE_conditional_move
896 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
897 vtrue, vfalse, GET_MODE (x),
898 (code == LTU || code == GEU
899 || code == LEU || code == GTU));
900 #else
901 /* We'll never get here, as noce_process_if_block doesn't call the
902 functions involved. Ifdef code, however, should be discouraged
 903 because it leads to typos in the code not selected.  But
904 emit_conditional_move won't exist either. */
905 return NULL_RTX;
906 #endif
909 /* Try only simple constants and registers here. More complex cases
910 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
911 has had a go at it. */
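/* For example, "if (a < b) x = c; else x = d;" with c and d being
   registers or constants (names invented) maps directly onto the
   target's conditional-move pattern via noce_emit_cmove below.  */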
913 static int
914 noce_try_cmove (if_info)
915 struct noce_if_info *if_info;
917 enum rtx_code code;
918 rtx target, seq;
920 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
921 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
923 start_sequence ();
925 code = GET_CODE (if_info->cond);
926 target = noce_emit_cmove (if_info, if_info->x, code,
927 XEXP (if_info->cond, 0),
928 XEXP (if_info->cond, 1),
929 if_info->a, if_info->b);
931 if (target)
933 if (target != if_info->x)
934 noce_emit_move_insn (if_info->x, target);
936 seq = get_insns ();
937 end_sequence ();
938 emit_insns_before (seq, if_info->cond_earliest);
939 return TRUE;
941 else
943 end_sequence ();
944 return FALSE;
948 return FALSE;
951 /* Try more complex cases involving conditional_move. */
953 static int
954 noce_try_cmove_arith (if_info)
955 struct noce_if_info *if_info;
957 rtx a = if_info->a;
958 rtx b = if_info->b;
959 rtx x = if_info->x;
960 rtx insn_a, insn_b;
961 rtx tmp, target;
962 int is_mem = 0;
963 enum rtx_code code;
965 /* A conditional move from two memory sources is equivalent to a
966 conditional on their addresses followed by a load. Don't do this
967 early because it'll screw alias analysis. Note that we've
968 already checked for no side effects. */
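/* In source terms, "if (test) x = *p; else x = *q;" (names invented)
   becomes roughly "tmp = test ? p : q; x = *tmp;": a conditional move
   on the addresses followed by a single load, which is what the
   is_mem path below arranges.  */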
969 if (! no_new_pseudos && cse_not_expected
970 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
971 && BRANCH_COST >= 5)
973 a = XEXP (a, 0);
974 b = XEXP (b, 0);
975 x = gen_reg_rtx (Pmode);
976 is_mem = 1;
979 /* ??? We could handle this if we knew that a load from A or B could
980 not fault. This is also true if we've already loaded
981 from the address along the path from ENTRY. */
982 else if (may_trap_p (a) || may_trap_p (b))
983 return FALSE;
985 /* if (test) x = a + b; else x = c - d;
986 => y = a + b;
987 x = c - d;
988 if (test)
989 x = y;
992 code = GET_CODE (if_info->cond);
993 insn_a = if_info->insn_a;
994 insn_b = if_info->insn_b;
996 /* Possibly rearrange operands to make things come out more natural. */
997 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
999 int reversep = 0;
1000 if (rtx_equal_p (b, x))
1001 reversep = 1;
1002 else if (general_operand (b, GET_MODE (b)))
1003 reversep = 1;
1005 if (reversep)
1007 code = reversed_comparison_code (if_info->cond, if_info->jump);
1008 tmp = a, a = b, b = tmp;
1009 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1013 start_sequence ();
1015 /* If either operand is complex, load it into a register first.
1016 The best way to do this is to copy the original insn. In this
1017 way we preserve any clobbers etc that the insn may have had.
1018 This is of course not possible in the IS_MEM case. */
1019 if (! general_operand (a, GET_MODE (a)))
1021 rtx set;
1023 if (no_new_pseudos)
1024 goto end_seq_and_fail;
1026 if (is_mem)
1028 tmp = gen_reg_rtx (GET_MODE (a));
1029 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1031 else if (! insn_a)
1032 goto end_seq_and_fail;
1033 else
1035 a = gen_reg_rtx (GET_MODE (a));
1036 tmp = copy_rtx (insn_a);
1037 set = single_set (tmp);
1038 SET_DEST (set) = a;
1039 tmp = emit_insn (PATTERN (tmp));
1041 if (recog_memoized (tmp) < 0)
1042 goto end_seq_and_fail;
1044 if (! general_operand (b, GET_MODE (b)))
1046 rtx set;
1048 if (no_new_pseudos)
1049 goto end_seq_and_fail;
1051 if (is_mem)
1053 tmp = gen_reg_rtx (GET_MODE (b));
1054 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1056 else if (! insn_b)
1057 goto end_seq_and_fail;
1058 else
1060 b = gen_reg_rtx (GET_MODE (b));
1061 tmp = copy_rtx (insn_b);
1062 set = single_set (tmp);
1063 SET_DEST (set) = b;
1064 tmp = emit_insn (PATTERN (tmp));
1066 if (recog_memoized (tmp) < 0)
1067 goto end_seq_and_fail;
1070 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1071 XEXP (if_info->cond, 1), a, b);
1073 if (! target)
1074 goto end_seq_and_fail;
 1076 /* If we're handling the memory case from above, emit the load now. */
1077 if (is_mem)
1079 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1081 /* Copy over flags as appropriate. */
1082 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1083 MEM_VOLATILE_P (tmp) = 1;
1084 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1085 MEM_IN_STRUCT_P (tmp) = 1;
1086 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1087 MEM_SCALAR_P (tmp) = 1;
1088 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1089 MEM_ALIAS_SET (tmp) = MEM_ALIAS_SET (if_info->a);
1091 noce_emit_move_insn (if_info->x, tmp);
1093 else if (target != x)
1094 noce_emit_move_insn (x, target);
1096 tmp = get_insns ();
1097 end_sequence ();
1098 emit_insns_before (tmp, if_info->cond_earliest);
1099 return TRUE;
1101 end_seq_and_fail:
1102 end_sequence ();
1103 return FALSE;
1106 /* For most cases, the simplified condition we found is the best
1107 choice, but this is not the case for the min/max/abs transforms.
1108 For these we wish to know that it is A or B in the condition. */
1110 static rtx
1111 noce_get_alt_condition (if_info, target, earliest)
1112 struct noce_if_info *if_info;
1113 rtx target;
1114 rtx *earliest;
1116 rtx cond, set, insn;
1117 int reverse;
1119 /* If target is already mentioned in the known condition, return it. */
1120 if (reg_mentioned_p (target, if_info->cond))
1122 *earliest = if_info->cond_earliest;
1123 return if_info->cond;
1126 set = pc_set (if_info->jump);
1127 cond = XEXP (SET_SRC (set), 0);
1128 reverse
1129 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1130 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1132 cond = canonicalize_condition (if_info->jump, cond, reverse,
1133 earliest, target);
1134 if (! cond || ! reg_mentioned_p (target, cond))
1135 return NULL;
1137 /* We almost certainly searched back to a different place.
1138 Need to re-verify correct lifetimes. */
1140 /* X may not be mentioned in the range (cond_earliest, jump]. */
1141 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1142 if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
1143 return NULL;
1145 /* A and B may not be modified in the range [cond_earliest, jump). */
1146 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1147 if (INSN_P (insn)
1148 && (modified_in_p (if_info->a, insn)
1149 || modified_in_p (if_info->b, insn)))
1150 return NULL;
1152 return cond;
1155 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1157 static int
1158 noce_try_minmax (if_info)
1159 struct noce_if_info *if_info;
1161 rtx cond, earliest, target, seq;
1162 enum rtx_code code;
1163 int unsignedp;
1164 optab op;
1166 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1167 if (no_new_pseudos)
1168 return FALSE;
1170 /* ??? Reject FP modes since we don't know how 0 vs -0 or NaNs
1171 will be resolved with an SMIN/SMAX. It wouldn't be too hard
1172 to get the target to tell us... */
1173 if (FLOAT_MODE_P (GET_MODE (if_info->x))
1174 && TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1175 && ! flag_unsafe_math_optimizations)
1176 return FALSE;
1178 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1179 if (!cond)
1180 return FALSE;
1182 /* Verify the condition is of the form we expect, and canonicalize
1183 the comparison code. */
1184 code = GET_CODE (cond);
1185 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1187 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1188 return FALSE;
1190 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1192 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1193 return FALSE;
1194 code = swap_condition (code);
1196 else
1197 return FALSE;
1199 /* Determine what sort of operation this is. Note that the code is for
1200 a taken branch, so the code->operation mapping appears backwards. */
1201 switch (code)
1203 case LT:
1204 case LE:
1205 case UNLT:
1206 case UNLE:
1207 op = smax_optab;
1208 unsignedp = 0;
1209 break;
1210 case GT:
1211 case GE:
1212 case UNGT:
1213 case UNGE:
1214 op = smin_optab;
1215 unsignedp = 0;
1216 break;
1217 case LTU:
1218 case LEU:
1219 op = umax_optab;
1220 unsignedp = 1;
1221 break;
1222 case GTU:
1223 case GEU:
1224 op = umin_optab;
1225 unsignedp = 1;
1226 break;
1227 default:
1228 return FALSE;
1231 start_sequence ();
1233 target = expand_binop (GET_MODE (if_info->x), op, if_info->a, if_info->b,
1234 if_info->x, unsignedp, OPTAB_WIDEN);
1235 if (! target)
1237 end_sequence ();
1238 return FALSE;
1240 if (target != if_info->x)
1241 noce_emit_move_insn (if_info->x, target);
1243 seq = get_insns ();
1244 end_sequence ();
1246 if (seq_contains_jump (seq))
1247 return FALSE;
1249 emit_insns_before (seq, earliest);
1250 if_info->cond = cond;
1251 if_info->cond_earliest = earliest;
1253 return TRUE;
1256 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1258 static int
1259 noce_try_abs (if_info)
1260 struct noce_if_info *if_info;
1262 rtx cond, earliest, target, seq, a, b, c;
1263 int negate;
1265 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1266 if (no_new_pseudos)
1267 return FALSE;
1269 /* Recognize A and B as constituting an ABS or NABS. */
1270 a = if_info->a;
1271 b = if_info->b;
1272 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1273 negate = 0;
1274 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1276 c = a; a = b; b = c;
1277 negate = 1;
1279 else
1280 return FALSE;
1282 cond = noce_get_alt_condition (if_info, b, &earliest);
1283 if (!cond)
1284 return FALSE;
1286 /* Verify the condition is of the form we expect. */
1287 if (rtx_equal_p (XEXP (cond, 0), b))
1288 c = XEXP (cond, 1);
1289 else if (rtx_equal_p (XEXP (cond, 1), b))
1290 c = XEXP (cond, 0);
1291 else
1292 return FALSE;
1294 /* Verify that C is zero. Search backward through the block for
1295 a REG_EQUAL note if necessary. */
1296 if (REG_P (c))
1298 rtx insn, note = NULL;
1299 for (insn = earliest;
1300 insn != if_info->test_bb->head;
1301 insn = PREV_INSN (insn))
1302 if (INSN_P (insn)
1303 && ((note = find_reg_note (insn, REG_EQUAL, c))
1304 || (note = find_reg_note (insn, REG_EQUIV, c))))
1305 break;
1306 if (! note)
1307 return FALSE;
1308 c = XEXP (note, 0);
1310 if (GET_CODE (c) == MEM
1311 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1312 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1313 c = get_pool_constant (XEXP (c, 0));
1315 /* Work around funny ideas get_condition has wrt canonicalization.
1316 Note that these rtx constants are known to be CONST_INT, and
1317 therefore imply integer comparisons. */
1318 if (c == constm1_rtx && GET_CODE (cond) == GT)
1320 else if (c == const1_rtx && GET_CODE (cond) == LT)
1322 else if (c != CONST0_RTX (GET_MODE (b)))
1323 return FALSE;
1325 /* Determine what sort of operation this is. */
1326 switch (GET_CODE (cond))
1328 case LT:
1329 case LE:
1330 case UNLT:
1331 case UNLE:
1332 negate = !negate;
1333 break;
1334 case GT:
1335 case GE:
1336 case UNGT:
1337 case UNGE:
1338 break;
1339 default:
1340 return FALSE;
1343 start_sequence ();
1345 target = expand_unop (GET_MODE (if_info->x), abs_optab, b, if_info->x, 0);
 1347 /* ??? It's a quandary whether cmove would be better here, especially
1348 for integers. Perhaps combine will clean things up. */
1349 if (target && negate)
1350 target = expand_unop (GET_MODE (target), neg_optab, target, if_info->x, 0);
1352 if (! target)
1354 end_sequence ();
1355 return FALSE;
1358 if (target != if_info->x)
1359 noce_emit_move_insn (if_info->x, target);
1361 seq = get_insns ();
1362 end_sequence ();
1364 if (seq_contains_jump (seq))
1365 return FALSE;
1367 emit_insns_before (seq, earliest);
1368 if_info->cond = cond;
1369 if_info->cond_earliest = earliest;
1371 return TRUE;
1374 /* Look for the condition for the jump first. We'd prefer to avoid
1375 get_condition if we can -- it tries to look back for the contents
1376 of an original compare. On targets that use normal integers for
1377 comparisons, e.g. alpha, this is wasteful. */
1379 static rtx
1380 noce_get_condition (jump, earliest)
1381 rtx jump;
1382 rtx *earliest;
1384 rtx cond;
1385 rtx set;
1387 /* If the condition variable is a register and is MODE_INT, accept it.
1388 Otherwise, fall back on get_condition. */
1390 if (! any_condjump_p (jump))
1391 return NULL_RTX;
1393 set = pc_set (jump);
1395 cond = XEXP (SET_SRC (set), 0);
1396 if (GET_CODE (XEXP (cond, 0)) == REG
1397 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1399 *earliest = jump;
1401 /* If this branches to JUMP_LABEL when the condition is false,
1402 reverse the condition. */
1403 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1404 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1405 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1406 GET_MODE (cond), XEXP (cond, 0),
1407 XEXP (cond, 1));
1409 else
1410 cond = get_condition (jump, earliest);
1412 return cond;
1415 /* Return true if OP is ok for if-then-else processing. */
1417 static int
1418 noce_operand_ok (op)
1419 rtx op;
1421 /* We special-case memories, so handle any of them with
1422 no address side effects. */
1423 if (GET_CODE (op) == MEM)
1424 return ! side_effects_p (XEXP (op, 0));
1426 if (side_effects_p (op))
1427 return FALSE;
 1429 /* ??? Unfortunately may_trap_p can't look at flag_trapping_math, due to
1430 being linked into the genfoo programs. This is probably a mistake.
1431 With finite operands, most fp operations don't trap. */
1432 if (!flag_trapping_math && FLOAT_MODE_P (GET_MODE (op)))
1433 switch (GET_CODE (op))
1435 case DIV:
1436 case MOD:
1437 case UDIV:
1438 case UMOD:
1439 /* ??? This is kinda lame -- almost every target will have forced
1440 the constant into a register first. But given the expense of
1441 division, this is probably for the best. */
1442 return (CONSTANT_P (XEXP (op, 1))
1443 && XEXP (op, 1) != CONST0_RTX (GET_MODE (op))
1444 && ! may_trap_p (XEXP (op, 0)));
1446 default:
1447 switch (GET_RTX_CLASS (GET_CODE (op)))
1449 case '1':
1450 return ! may_trap_p (XEXP (op, 0));
1451 case 'c':
1452 case '2':
1453 return ! may_trap_p (XEXP (op, 0)) && ! may_trap_p (XEXP (op, 1));
1455 break;
1458 return ! may_trap_p (op);
1461 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1462 without using conditional execution. Return TRUE if we were
 1463 successful at converting the block. */
1465 static int
1466 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1467 basic_block test_bb; /* Basic block test is in */
1468 basic_block then_bb; /* Basic block for THEN block */
1469 basic_block else_bb; /* Basic block for ELSE block */
1470 basic_block join_bb; /* Basic block the join label is in */
1472 /* We're looking for patterns of the form
1474 (1) if (...) x = a; else x = b;
1475 (2) x = b; if (...) x = a;
1476 (3) if (...) x = a; // as if with an initial x = x.
1478 The later patterns require jumps to be more expensive.
1480 ??? For future expansion, look for multiple X in such patterns. */
1482 struct noce_if_info if_info;
1483 rtx insn_a, insn_b;
1484 rtx set_a, set_b;
1485 rtx orig_x, x, a, b;
1486 rtx jump, cond, insn;
1488 /* If this is not a standard conditional jump, we can't parse it. */
1489 jump = test_bb->end;
1490 cond = noce_get_condition (jump, &if_info.cond_earliest);
1491 if (! cond)
1492 return FALSE;
1494 /* If the conditional jump is more than just a conditional jump,
1495 then we can not do if-conversion on this block. */
1496 if (! onlyjump_p (jump))
1497 return FALSE;
1499 /* We must be comparing objects whose modes imply the size. */
1500 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1501 return FALSE;
1503 /* Look for one of the potential sets. */
1504 insn_a = first_active_insn (then_bb);
1505 if (! insn_a
1506 || ! last_active_insn_p (then_bb, insn_a)
1507 || (set_a = single_set (insn_a)) == NULL_RTX)
1508 return FALSE;
1510 x = SET_DEST (set_a);
1511 a = SET_SRC (set_a);
1513 /* Look for the other potential set. Make sure we've got equivalent
1514 destinations. */
1515 /* ??? This is overconservative. Storing to two different mems is
1516 as easy as conditionally computing the address. Storing to a
1517 single mem merely requires a scratch memory to use as one of the
1518 destination addresses; often the memory immediately below the
1519 stack pointer is available for this. */
1520 set_b = NULL_RTX;
1521 if (else_bb)
1523 insn_b = first_active_insn (else_bb);
1524 if (! insn_b
1525 || ! last_active_insn_p (else_bb, insn_b)
1526 || (set_b = single_set (insn_b)) == NULL_RTX
1527 || ! rtx_equal_p (x, SET_DEST (set_b)))
1528 return FALSE;
1530 else
1532 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1533 if (! insn_b
1534 || GET_CODE (insn_b) != INSN
1535 || (set_b = single_set (insn_b)) == NULL_RTX
1536 || ! rtx_equal_p (x, SET_DEST (set_b))
1537 || reg_mentioned_p (x, cond)
1538 || reg_mentioned_p (x, a)
1539 || reg_mentioned_p (x, SET_SRC (set_b)))
1540 insn_b = set_b = NULL_RTX;
1542 b = (set_b ? SET_SRC (set_b) : x);
1544 /* X may not be mentioned in the range (cond_earliest, jump]. */
1545 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1546 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1547 return FALSE;
1549 /* A and B may not be modified in the range [cond_earliest, jump). */
1550 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1551 if (INSN_P (insn)
1552 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1553 return FALSE;
1555 /* Only operate on register destinations, and even then avoid extending
1556 the lifetime of hard registers on small register class machines. */
1557 orig_x = x;
1558 if (GET_CODE (x) != REG
1559 || (SMALL_REGISTER_CLASSES
1560 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1562 if (no_new_pseudos)
1563 return FALSE;
1564 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1565 ? XEXP (x, 0) : x));
1568 /* Don't operate on sources that may trap or are volatile. */
1569 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1570 return FALSE;
1572 /* Set up the info block for our subroutines. */
1573 if_info.test_bb = test_bb;
1574 if_info.cond = cond;
1575 if_info.jump = jump;
1576 if_info.insn_a = insn_a;
1577 if_info.insn_b = insn_b;
1578 if_info.x = x;
1579 if_info.a = a;
1580 if_info.b = b;
1582 /* Try optimizations in some approximation of a useful order. */
1583 /* ??? Should first look to see if X is live incoming at all. If it
1584 isn't, we don't need anything but an unconditional set. */
1586 /* Look and see if A and B are really the same. Avoid creating silly
1587 cmove constructs that no one will fix up later. */
1588 if (rtx_equal_p (a, b))
1590 /* If we have an INSN_B, we don't have to create any new rtl. Just
1591 move the instruction that we already have. If we don't have an
1592 INSN_B, that means that A == X, and we've got a noop move. In
1593 that case don't do anything and let the code below delete INSN_A. */
1594 if (insn_b && else_bb)
1596 rtx note;
1598 if (else_bb && insn_b == else_bb->end)
1599 else_bb->end = PREV_INSN (insn_b);
1600 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1602 /* If there was a REG_EQUAL note, delete it since it may have been
1603 true due to this insn being after a jump. */
1604 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1605 remove_note (insn_b, note);
1607 insn_b = NULL_RTX;
1609 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1610 x must be executed twice. */
1611 else if (insn_b && side_effects_p (orig_x))
1612 return FALSE;
1614 x = orig_x;
1615 goto success;
1618 if (noce_try_store_flag (&if_info))
1619 goto success;
1620 if (noce_try_minmax (&if_info))
1621 goto success;
1622 if (noce_try_abs (&if_info))
1623 goto success;
1624 if (HAVE_conditional_move
1625 && noce_try_cmove (&if_info))
1626 goto success;
1627 if (! HAVE_conditional_execution)
1629 if (noce_try_store_flag_constants (&if_info))
1630 goto success;
1631 if (noce_try_store_flag_inc (&if_info))
1632 goto success;
1633 if (noce_try_store_flag_mask (&if_info))
1634 goto success;
1635 if (HAVE_conditional_move
1636 && noce_try_cmove_arith (&if_info))
1637 goto success;
1640 return FALSE;
1642 success:
1643 /* The original sets may now be killed. */
1644 if (insn_a == then_bb->end)
1645 then_bb->end = PREV_INSN (insn_a);
1646 flow_delete_insn (insn_a);
1648 /* Several special cases here: First, we may have reused insn_b above,
1649 in which case insn_b is now NULL. Second, we want to delete insn_b
 1650 if it came from the ELSE block, because it follows the now correct
1651 write that appears in the TEST block. However, if we got insn_b from
1652 the TEST block, it may in fact be loading data needed for the comparison.
1653 We'll let life_analysis remove the insn if it's really dead. */
1654 if (insn_b && else_bb)
1656 if (insn_b == else_bb->end)
1657 else_bb->end = PREV_INSN (insn_b);
1658 flow_delete_insn (insn_b);
1661 /* The new insns will have been inserted before cond_earliest. We should
1662 be able to remove the jump with impunity, but the condition itself may
1663 have been modified by gcse to be shared across basic blocks. */
1664 test_bb->end = PREV_INSN (jump);
1665 flow_delete_insn (jump);
1667 /* If we used a temporary, fix it up now. */
1668 if (orig_x != x)
1670 start_sequence ();
1671 noce_emit_move_insn (orig_x, x);
1672 insn_b = gen_sequence ();
1673 end_sequence ();
1675 test_bb->end = emit_insn_after (insn_b, test_bb->end);
1678 /* Merge the blocks! */
1679 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1681 return TRUE;
1684 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1685 straight line code. Return true if successful. */
1687 static int
1688 process_if_block (test_bb, then_bb, else_bb, join_bb)
1689 basic_block test_bb; /* Basic block test is in */
1690 basic_block then_bb; /* Basic block for THEN block */
1691 basic_block else_bb; /* Basic block for ELSE block */
1692 basic_block join_bb; /* Basic block the join label is in */
1694 if (! reload_completed
1695 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1696 return TRUE;
1698 if (HAVE_conditional_execution
1699 && reload_completed
1700 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1701 return TRUE;
1703 return FALSE;
1706 /* Merge the blocks and mark for local life update. */
1708 static void
1709 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1710 basic_block test_bb; /* Basic block test is in */
1711 basic_block then_bb; /* Basic block for THEN block */
1712 basic_block else_bb; /* Basic block for ELSE block */
1713 basic_block join_bb; /* Basic block the join label is in */
1715 basic_block combo_bb;
1717 /* All block merging is done into the lower block numbers. */
1719 combo_bb = test_bb;
1721 /* First merge TEST block into THEN block. This is a no-brainer since
1722 the THEN block did not have a code label to begin with. */
1724 if (combo_bb->global_live_at_end)
1725 COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
1726 merge_blocks_nomove (combo_bb, then_bb);
1727 num_removed_blocks++;
1729 /* The ELSE block, if it existed, had a label. That label count
1730 will almost always be zero, but odd things can happen when labels
1731 get their addresses taken. */
1732 if (else_bb)
1734 merge_blocks_nomove (combo_bb, else_bb);
1735 num_removed_blocks++;
1738 /* If there was no join block reported, that means it was not adjacent
1739 to the others, and so we cannot merge them. */
1741 if (! join_bb)
1743 /* The outgoing edge for the current COMBO block should already
1744 be correct. Verify this. */
1745 if (combo_bb->succ == NULL_EDGE)
1746 abort ();
 1748 /* There should still be a branch at the end of the THEN or ELSE
1749 blocks taking us to our final destination. */
1750 if (! simplejump_p (combo_bb->end)
1751 && ! returnjump_p (combo_bb->end))
1752 abort ();
1755 /* The JOIN block may have had quite a number of other predecessors too.
1756 Since we've already merged the TEST, THEN and ELSE blocks, we should
1757 have only one remaining edge from our if-then-else diamond. If there
1758 is more than one remaining edge, it must come from elsewhere. There
1759 may be zero incoming edges if the THEN block didn't actually join
1760 back up (as with a call to abort). */
1761 else if (join_bb->pred == NULL || join_bb->pred->pred_next == NULL)
1763 /* We can merge the JOIN. */
1764 if (combo_bb->global_live_at_end)
1765 COPY_REG_SET (combo_bb->global_live_at_end,
1766 join_bb->global_live_at_end);
1767 merge_blocks_nomove (combo_bb, join_bb);
1768 num_removed_blocks++;
1770 else
1772 /* We cannot merge the JOIN. */
1774 /* The outgoing edge for the current COMBO block should already
1775 be correct. Verify this. */
1776 if (combo_bb->succ->succ_next != NULL_EDGE
1777 || combo_bb->succ->dest != join_bb)
1778 abort ();
1780 /* Remove the jump and cruft from the end of the COMBO block. */
1781 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1784 /* Make sure we update life info properly. */
1785 SET_UPDATE_LIFE (combo_bb);
1787 num_updated_if_blocks++;
1790 /* Find a block ending in a simple IF condition. Return TRUE if
1791 we were able to transform it in some way. */
1793 static int
1794 find_if_header (test_bb)
1795 basic_block test_bb;
1797 edge then_edge;
1798 edge else_edge;
1800 /* The kind of block we're looking for has exactly two successors. */
1801 if ((then_edge = test_bb->succ) == NULL_EDGE
1802 || (else_edge = then_edge->succ_next) == NULL_EDGE
1803 || else_edge->succ_next != NULL_EDGE)
1804 return FALSE;
1806 /* Neither edge should be abnormal. */
1807 if ((then_edge->flags & EDGE_COMPLEX)
1808 || (else_edge->flags & EDGE_COMPLEX))
1809 return FALSE;
1811 /* The THEN edge is canonically the one that falls through. */
1812 if (then_edge->flags & EDGE_FALLTHRU)
1814 else if (else_edge->flags & EDGE_FALLTHRU)
1816 edge e = else_edge;
1817 else_edge = then_edge;
1818 then_edge = e;
1820 else
1821 /* Otherwise this must be a multiway branch of some sort. */
1822 return FALSE;
1824 if (find_if_block (test_bb, then_edge, else_edge))
1825 goto success;
1826 if (post_dominators
1827 && (! HAVE_conditional_execution || reload_completed))
1829 if (find_if_case_1 (test_bb, then_edge, else_edge))
1830 goto success;
1831 if (find_if_case_2 (test_bb, then_edge, else_edge))
1832 goto success;
1835 return FALSE;
1837 success:
1838 if (rtl_dump_file)
1839 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1840 return TRUE;
1843 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1844 block. If so, we'll try to convert the insns to not require the branch.
 1845 Return TRUE if we were successful at converting the block. */
1847 static int
1848 find_if_block (test_bb, then_edge, else_edge)
1849 basic_block test_bb;
1850 edge then_edge, else_edge;
1852 basic_block then_bb = then_edge->dest;
1853 basic_block else_bb = else_edge->dest;
1854 basic_block join_bb = NULL_BLOCK;
1855 edge then_succ = then_bb->succ;
1856 edge else_succ = else_bb->succ;
1857 int next_index;
1859 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1860 if (then_bb->pred->pred_next != NULL_EDGE)
1861 return FALSE;
1863 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1864 if (then_succ != NULL_EDGE
1865 && (then_succ->succ_next != NULL_EDGE
1866 || (then_succ->flags & EDGE_COMPLEX)))
1867 return FALSE;
1869 /* If the THEN block has no successors, conditional execution can still
1870 make a conditional call. Don't do this unless the ELSE block has
1871 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1872 Check for the last insn of the THEN block being an indirect jump, which
1873 is listed as not having any successors, but confuses the rest of the CE
1874 code processing. XXX we should fix this in the future. */
1875 if (then_succ == NULL)
1877 if (else_bb->pred->pred_next == NULL_EDGE)
1879 rtx last_insn = then_bb->end;
1881 while (last_insn
1882 && GET_CODE (last_insn) == NOTE
1883 && last_insn != then_bb->head)
1884 last_insn = PREV_INSN (last_insn);
1886 if (last_insn
1887 && GET_CODE (last_insn) == JUMP_INSN
1888 && ! simplejump_p (last_insn))
1889 return FALSE;
1891 join_bb = else_bb;
1892 else_bb = NULL_BLOCK;
1894 else
1895 return FALSE;
1898 /* If the THEN block's successor is the other edge out of the TEST block,
1899 then we have an IF-THEN combo without an ELSE. */
1900 else if (then_succ->dest == else_bb)
1902 join_bb = else_bb;
1903 else_bb = NULL_BLOCK;
1906 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
1907 has exactly one predecessor and one successor, and the outgoing edge
1908 is not complex, then we have an IF-THEN-ELSE combo. */
1909 else if (else_succ != NULL_EDGE
1910 && then_succ->dest == else_succ->dest
1911 && else_bb->pred->pred_next == NULL_EDGE
1912 && else_succ->succ_next == NULL_EDGE
1913 && ! (else_succ->flags & EDGE_COMPLEX))
1914 join_bb = else_succ->dest;
1916 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
1917 else
1918 return FALSE;
1920 num_possible_if_blocks++;
1922 if (rtl_dump_file)
1924 if (else_bb)
1925 fprintf (rtl_dump_file,
1926 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
1927 test_bb->index, then_bb->index, else_bb->index,
1928 join_bb->index);
1929 else
1930 fprintf (rtl_dump_file,
1931 "\nIF-THEN block found, start %d, then %d, join %d\n",
1932 test_bb->index, then_bb->index, join_bb->index);
 1935 /* Make sure IF, THEN, and ELSE blocks are adjacent.  Actually, we
1936 get the first condition for free, since we've already asserted that
1937 there's a fallthru edge from IF to THEN. */
1938 /* ??? As an enhancement, move the ELSE block. Have to deal with
1939 BLOCK notes, if by no other means than aborting the merge if they
1940 exist. Sticky enough I don't want to think about it now. */
1941 next_index = then_bb->index;
1942 if (else_bb && ++next_index != else_bb->index)
1943 return FALSE;
1944 if (++next_index != join_bb->index)
1946 if (else_bb)
1947 join_bb = NULL;
1948 else
1949 return FALSE;
1952 /* Do the real work. */
1953 return process_if_block (test_bb, then_bb, else_bb, join_bb);
1956 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
1957 transformable, but not necessarily the other. There need be no
1958 JOIN block.
 1960 Return TRUE if we were successful at converting the block.
1962 Cases we'd like to look at:
1965 if (test) goto over; // x not live
1966 x = a;
1967 goto label;
1968 over:
1970 becomes
1972 x = a;
1973 if (! test) goto label;
1976 if (test) goto E; // x not live
1977 x = big();
1978 goto L;
1980 x = b;
1981 goto M;
1983 becomes
1985 x = b;
1986 if (test) goto M;
1987 x = big();
1988 goto L;
1990 (3) // This one's really only interesting for targets that can do
1991 // multiway branching, e.g. IA-64 BBB bundles. For other targets
1992 // it results in multiple branches on a cache line, which often
1993 // does not sit well with predictors.
1995 if (test1) goto E; // predicted not taken
1996 x = a;
1997 if (test2) goto F;
2000 x = b;
2003 becomes
2005 x = a;
2006 if (test1) goto E;
2007 if (test2) goto F;
2009 Notes:
2011 (A) Don't do (2) if the branch is predicted against the block we're
2012 eliminating. Do it anyway if we can eliminate a branch; this requires
2013 that the sole successor of the eliminated block postdominate the other
2014 side of the if.
2016 (B) With CE, on (3) we can steal from both sides of the if, creating
2018 if (test1) x = a;
2019 if (!test1) x = b;
2020 if (test1) goto J;
2021 if (test2) goto F;
2025 Again, this is most useful if J postdominates.
2027 (C) CE substitutes for helpful life information.
2029 (D) These heuristics need a lot of work. */
2031 /* Tests for case 1 above. */
2033 static int
2034 find_if_case_1 (test_bb, then_edge, else_edge)
2035 basic_block test_bb;
2036 edge then_edge, else_edge;
2038 basic_block then_bb = then_edge->dest;
2039 basic_block else_bb = else_edge->dest;
2040 edge then_succ = then_bb->succ;
2041 rtx new_lab;
2043 /* THEN has one successor. */
2044 if (!then_succ || then_succ->succ_next != NULL)
2045 return FALSE;
2047 /* THEN does not fall through, but is not strange either. */
2048 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2049 return FALSE;
2051 /* THEN has one predecessor. */
2052 if (then_bb->pred->pred_next != NULL)
2053 return FALSE;
2055 /* ELSE follows THEN. (??? could be moved) */
2056 if (else_bb->index != then_bb->index + 1)
2057 return FALSE;
2059 num_possible_if_blocks++;
2060 if (rtl_dump_file)
2061 fprintf (rtl_dump_file,
2062 "\nIF-CASE-1 found, start %d, then %d\n",
2063 test_bb->index, then_bb->index);
2065 /* THEN is small. */
2066 if (count_bb_insns (then_bb) > BRANCH_COST)
2067 return FALSE;
2069 /* Find the label for THEN's destination. */
2070 if (then_succ->dest == EXIT_BLOCK_PTR)
2071 new_lab = NULL_RTX;
2072 else
2074 new_lab = JUMP_LABEL (then_bb->end);
2075 if (! new_lab)
2076 abort ();
2079 /* Registers set are dead, or are predicable. */
2080 if (! dead_or_predicable (test_bb, then_bb, else_bb, new_lab, 1))
2081 return FALSE;
2083 /* Conversion went ok, including moving the insns and fixing up the
2084 jump. Adjust the CFG to match. */
2086 SET_UPDATE_LIFE (test_bb);
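/* TEST now branches to THEN's old destination and falls through to ELSE,
   so the registers live at the end of TEST are those live out of THEN
   plus those live into ELSE.  */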
2087 bitmap_operation (test_bb->global_live_at_end,
2088 else_bb->global_live_at_start,
2089 then_bb->global_live_at_end, BITMAP_IOR);
2091 make_edge (NULL, test_bb, then_succ->dest, 0);
2092 flow_delete_block (then_bb);
2093 tidy_fallthru_edge (else_edge, test_bb, else_bb);
2095 num_removed_blocks++;
2096 num_updated_if_blocks++;
2098 return TRUE;
2101 /* Test for case 2 above. */
2103 static int
2104 find_if_case_2 (test_bb, then_edge, else_edge)
2105 basic_block test_bb;
2106 edge then_edge, else_edge;
2108 basic_block then_bb = then_edge->dest;
2109 basic_block else_bb = else_edge->dest;
2110 edge else_succ = else_bb->succ;
2111 rtx new_lab, note;
2113 /* ELSE has one successor. */
2114 if (!else_succ || else_succ->succ_next != NULL)
2115 return FALSE;
2117 /* ELSE outgoing edge is not complex. */
2118 if (else_succ->flags & EDGE_COMPLEX)
2119 return FALSE;
2121 /* ELSE has one predecessor. */
2122 if (else_bb->pred->pred_next != NULL)
2123 return FALSE;
2125 /* THEN is not EXIT. */
2126 if (then_bb->index < 0)
2127 return FALSE;
2129 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2130 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2131 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2133 else if (else_succ->dest->index < 0
2134 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
2135 ORIG_INDEX (else_succ->dest)))
2137 else
2138 return FALSE;
2140 num_possible_if_blocks++;
2141 if (rtl_dump_file)
2142 fprintf (rtl_dump_file,
2143 "\nIF-CASE-2 found, start %d, else %d\n",
2144 test_bb->index, else_bb->index);
2146 /* ELSE is small. */
2147 if (count_bb_insns (else_bb) > BRANCH_COST)
2148 return FALSE;
2150 /* Find the label for ELSE's destination. */
2151 if (else_succ->dest == EXIT_BLOCK_PTR)
2152 new_lab = NULL_RTX;
2153 else
2155 if (else_succ->flags & EDGE_FALLTHRU)
2157 new_lab = else_succ->dest->head;
2158 if (GET_CODE (new_lab) != CODE_LABEL)
2159 abort ();
2161 else
2163 new_lab = JUMP_LABEL (else_bb->end);
2164 if (! new_lab)
2165 abort ();
2169 /* Registers set are dead, or are predicable. */
2170 if (! dead_or_predicable (test_bb, else_bb, then_bb, new_lab, 0))
2171 return FALSE;
2173 /* Conversion went ok, including moving the insns and fixing up the
2174 jump. Adjust the CFG to match. */
2176 SET_UPDATE_LIFE (test_bb);
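/* ELSE's insns have been moved before the branch; TEST now reaches THEN on
   one arm and ELSE's old successor on the other, so its live-at-end set is
   the union of what is live into THEN and what was live out of ELSE.  */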
2177 bitmap_operation (test_bb->global_live_at_end,
2178 then_bb->global_live_at_start,
2179 else_bb->global_live_at_end, BITMAP_IOR);
2181 remove_edge (else_edge);
2182 make_edge (NULL, test_bb, else_succ->dest, 0);
2183 flow_delete_block (else_bb);
2185 num_removed_blocks++;
2186 num_updated_if_blocks++;
2188 /* ??? We may now fallthru from one of THEN's successors into a join
2189 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2191 return TRUE;
2194 /* A subroutine of dead_or_predicable called through for_each_rtx.
2195 Return 1 if a memory is found. */
2197 static int
2198 find_memory (px, data)
2199 rtx *px;
2200 void *data ATTRIBUTE_UNUSED;
2202 return GET_CODE (*px) == MEM;
2205 /* Used by the code above to perform the actual rtl transformations.
2206 Return TRUE if successful.
2208 TEST_BB is the block containing the conditional branch. MERGE_BB
2209 is the block containing the code to manipulate. NEW_DEST is the
2210 label TEST_BB should be branching to after the conversion.
2211 REVERSEP is true if the sense of the branch should be reversed. */
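/* OTHER_BB is the block on the other arm of the branch; in the
   non-conditional execution case, registers live at its start must not be
   clobbered by the code being moved.  */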
2213 static int
2214 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2215 basic_block test_bb, merge_bb, other_bb;
2216 rtx new_dest;
2217 int reversep;
2219 rtx head, end, jump, earliest, old_dest;
2221 jump = test_bb->end;
2223 /* Find the extent of the real code in the merge block. */
2224 head = merge_bb->head;
2225 end = merge_bb->end;
2227 if (GET_CODE (head) == CODE_LABEL)
2228 head = NEXT_INSN (head);
2229 if (GET_CODE (head) == NOTE)
2231 if (head == end)
2233 head = end = NULL_RTX;
2234 goto no_body;
2236 head = NEXT_INSN (head);
2239 if (GET_CODE (end) == JUMP_INSN)
2241 if (head == end)
2243 head = end = NULL_RTX;
2244 goto no_body;
2246 end = PREV_INSN (end);
2249 /* Disable handling of dead code by conditional execution if the machine needs
2250 to do anything funny with the tests, etc. */
2251 #ifndef IFCVT_MODIFY_TESTS
2252 if (HAVE_conditional_execution)
2254 /* In the conditional execution case, we have things easy. We know
2255 the condition is reversible. We don't have to check life info,
2256 because we're going to conditionally execute the code anyway.
2257 All that's left is making sure the insns involved can actually
2258 be predicated. */
2260 rtx cond, prob_val;
2262 cond = cond_exec_get_condition (jump);
2264 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2265 if (prob_val)
2266 prob_val = XEXP (prob_val, 0);
2268 if (reversep)
2270 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2271 GET_MODE (cond), XEXP (cond, 0),
2272 XEXP (cond, 1));
2273 if (prob_val)
2274 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2277 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
2278 goto cancel;
2280 earliest = jump;
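/* EARLIEST is the jump itself: the (now predicated) MERGE_BB insns will be
   placed immediately before it by the move at the end of this function.  */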
2282 else
2283 #endif
2285 /* In the non-conditional execution case, we have to verify that there
2286 are no trapping operations, no calls, no references to memory, and
2287 that any registers modified are dead at the branch site. */
2289 rtx insn, cond, prev;
2290 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2291 regset merge_set, tmp, test_live, test_set;
2292 struct propagate_block_info *pbi;
2293 int i, fail = 0;
2295 /* Check for no calls or trapping operations. */
2296 for (insn = head; ; insn = NEXT_INSN (insn))
2298 if (GET_CODE (insn) == CALL_INSN)
2299 return FALSE;
2300 if (INSN_P (insn))
2302 if (may_trap_p (PATTERN (insn)))
2303 return FALSE;
2305 /* ??? Even non-trapping memories such as stack frame
2306 references must be avoided. For stores, we collect
2307 no lifetime info; for reads, we'd have to assert
2308 true_dependence false against every store in the
2309 TEST range. */
2310 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2311 return FALSE;
2313 if (insn == end)
2314 break;
2317 if (! any_condjump_p (jump))
2318 return FALSE;
2320 /* Find the extent of the conditional. */
2321 cond = noce_get_condition (jump, &earliest);
2322 if (! cond)
2323 return FALSE;
2325 /* Collect:
2326 MERGE_SET = set of registers set in MERGE_BB
2327 TEST_LIVE = set of registers live at EARLIEST
2328 TEST_SET = set of registers set between EARLIEST and the
2329 end of the block. */
2331 tmp = INITIALIZE_REG_SET (tmp_head);
2332 merge_set = INITIALIZE_REG_SET (merge_set_head);
2333 test_live = INITIALIZE_REG_SET (test_live_head);
2334 test_set = INITIALIZE_REG_SET (test_set_head);
2336 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2337 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2338 since we've already asserted that MERGE_BB is small. */
2339 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2341 /* For small register class machines, don't lengthen lifetimes of
2342 hard registers before reload. */
2343 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2345 EXECUTE_IF_SET_IN_BITMAP
2346 (merge_set, 0, i,
2348 if (i < FIRST_PSEUDO_REGISTER
2349 && ! fixed_regs[i]
2350 && ! global_regs[i])
2351 fail = 1;
2355 /* For TEST, we're interested in a range of insns, not a whole block.
2356 Moreover, we're interested in the insns live from OTHER_BB. */
2358 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2359 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2362 for (insn = jump; ; insn = prev)
2364 prev = propagate_one_insn (pbi, insn);
2365 if (insn == earliest)
2366 break;
2369 free_propagate_block_info (pbi);
2371 /* We can perform the transformation if
2372 MERGE_SET & (TEST_SET | TEST_LIVE)
2374 and TEST_SET & merge_bb->global_live_at_start
2375 are empty. */
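/* The first check ensures the moved insns do not overwrite a register that
   the condition computation still needs or sets (TEST_LIVE also includes
   what OTHER_BB needs at the branch); the second ensures they do not read a
   register that the condition computation writes, since the moved insns now
   execute first.  */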
2377 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2378 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2379 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2381 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2382 BITMAP_AND);
2383 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2385 FREE_REG_SET (tmp);
2386 FREE_REG_SET (merge_set);
2387 FREE_REG_SET (test_live);
2388 FREE_REG_SET (test_set);
2390 if (fail)
2391 return FALSE;
2394 no_body:
2395 /* We don't want to use normal invert_jump or redirect_jump because
2396 we don't want to delete_insn called. Also, we want to do our own
2397 change group management. */
2399 old_dest = JUMP_LABEL (jump);
2400 if (reversep
2401 ? ! invert_jump_1 (jump, new_dest)
2402 : ! redirect_jump_1 (jump, new_dest))
2403 goto cancel;
2405 if (! apply_change_group ())
2406 return FALSE;
2408 if (old_dest)
2409 LABEL_NUSES (old_dest) -= 1;
2410 if (new_dest)
2411 LABEL_NUSES (new_dest) += 1;
2412 JUMP_LABEL (jump) = new_dest;
2414 if (reversep)
2416 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2417 if (note)
2418 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
2421 /* Move the insns out of MERGE_BB to before the branch. */
2422 if (head != NULL)
2424 if (end == merge_bb->end)
2425 merge_bb->end = PREV_INSN (head);
2427 head = squeeze_notes (head, end);
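/* Do not move a trailing BLOCK or LOOP note: shrink the range so the note
   stays put, and if the note is all that is left there is nothing to move.  */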
2428 if (GET_CODE (end) == NOTE
2429 && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
2430 || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
2431 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
2432 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
2433 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
2434 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
2436 if (head == end)
2437 return TRUE;
2438 end = PREV_INSN (end);
2441 reorder_insns (head, end, PREV_INSN (earliest));
2443 return TRUE;
2445 cancel:
2446 cancel_changes (0);
2447 return FALSE;
2450 /* Main entry point for all if-conversion. */
2452 void
2453 if_convert (life_data_ok)
2454 int life_data_ok;
2456 int block_num;
2458 num_possible_if_blocks = 0;
2459 num_updated_if_blocks = 0;
2460 num_removed_blocks = 0;
2462 /* Free up basic_block_for_insn so that we don't have to keep it
2463 up to date, either here or in merge_blocks_nomove. */
2464 free_basic_block_vars (1);
2466 /* Compute postdominators if we think we'll use them. */
2467 post_dominators = NULL;
2468 if (HAVE_conditional_execution || life_data_ok)
2470 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2471 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2474 /* Record initial block numbers. */
2475 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2476 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2478 /* Go through each of the basic blocks looking for things to convert. */
2479 for (block_num = 0; block_num < n_basic_blocks; )
2481 basic_block bb = BASIC_BLOCK (block_num);
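/* If a conversion succeeded, blocks may have been merged and renumbered;
   resume at bb's (possibly changed) index so the combined block is
   examined again.  Otherwise just advance to the next block.  */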
2482 if (find_if_header (bb))
2483 block_num = bb->index;
2484 else
2485 block_num++;
2488 if (post_dominators)
2489 sbitmap_vector_free (post_dominators);
2491 if (rtl_dump_file)
2492 fflush (rtl_dump_file);
2494 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2495 compute_bb_for_insn (get_max_uid ());
2497 /* Rebuild life info for basic blocks that require it. */
2498 if (num_removed_blocks && life_data_ok)
2500 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2501 sbitmap_zero (update_life_blocks);
2503 /* If we allocated new pseudos, we must resize the array for sched1. */
2504 if (max_regno < max_reg_num ())
2506 max_regno = max_reg_num ();
2507 allocate_reg_info (max_regno, FALSE, FALSE);
2510 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2511 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2512 SET_BIT (update_life_blocks, block_num);
2514 count_or_remove_death_notes (update_life_blocks, 1);
2515 /* ??? See about adding a mode that verifies that the initial
2516 set of blocks don't let registers come live. */
2517 update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
2518 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2519 | PROP_KILL_DEAD_CODE);
2521 sbitmap_free (update_life_blocks);
2524 /* Write the final stats. */
2525 if (rtl_dump_file && num_possible_if_blocks > 0)
2527 fprintf (rtl_dump_file,
2528 "\n%d possible IF blocks searched.\n",
2529 num_possible_if_blocks);
2530 fprintf (rtl_dump_file,
2531 "%d IF blocks converted.\n",
2532 num_updated_if_blocks);
2533 fprintf (rtl_dump_file,
2534 "%d basic blocks deleted.\n\n\n",
2535 num_removed_blocks);
2538 #ifdef ENABLE_CHECKING
2539 verify_flow_info ();
2540 #endif