/* gcc/ifcvt.c (official-gcc.git) */
1 /* If-conversion support.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
24 #include "rtl.h"
25 #include "regs.h"
26 #include "function.h"
27 #include "flags.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "expr.h"
33 #include "output.h"
34 #include "tm_p.h"
37 #ifndef HAVE_conditional_execution
38 #define HAVE_conditional_execution 0
39 #endif
40 #ifndef HAVE_conditional_move
41 #define HAVE_conditional_move 0
42 #endif
43 #ifndef HAVE_incscc
44 #define HAVE_incscc 0
45 #endif
46 #ifndef HAVE_decscc
47 #define HAVE_decscc 0
48 #endif
50 #ifndef MAX_CONDITIONAL_EXECUTE
51 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
52 #endif
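/* E.g. with BRANCH_COST == 2 this default allows up to 3 insns in the
   THEN block of an IF-THEN, and up to 6 insns total across both arms
   of an IF-THEN-ELSE (cond_exec_process_if_block doubles the limit
   when an ELSE block is present).  */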
54 #define NULL_EDGE ((struct edge_def *)NULL)
55 #define NULL_BLOCK ((struct basic_block_def *)NULL)
57 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
58 static int num_possible_if_blocks;
/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to
   conditional execution.  */
62 static int num_updated_if_blocks;
64 /* # of basic blocks that were removed. */
65 static int num_removed_blocks;
67 /* The post-dominator relation on the original block numbers. */
68 static sbitmap *post_dominators;
70 /* Forward references. */
71 static int count_bb_insns PARAMS ((basic_block));
72 static rtx first_active_insn PARAMS ((basic_block));
73 static int last_active_insn_p PARAMS ((basic_block, rtx));
74 static int seq_contains_jump PARAMS ((rtx));
76 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
77 static rtx cond_exec_get_condition PARAMS ((rtx));
78 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
79 basic_block, basic_block));
81 static rtx noce_get_condition PARAMS ((rtx, rtx *));
82 static int noce_process_if_block PARAMS ((basic_block, basic_block,
83 basic_block, basic_block));
85 static int process_if_block PARAMS ((basic_block, basic_block,
86 basic_block, basic_block));
87 static void merge_if_block PARAMS ((basic_block, basic_block,
88 basic_block, basic_block));
90 static int find_if_header PARAMS ((basic_block));
91 static int find_if_block PARAMS ((basic_block, edge, edge));
92 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
93 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
94 static int find_memory PARAMS ((rtx *, void *));
95 static int dead_or_predicable PARAMS ((basic_block, basic_block,
96 basic_block, rtx, int));
97 static void noce_emit_move_insn PARAMS ((rtx, rtx));
/* Abuse the basic_block AUX field to store the original block index,
   as well as a flag indicating that the block should be rescanned for
   life analysis.  */
103 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
104 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
105 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
106 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
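/* Thus a block with original index 5 that needs rescanning has
   aux == (void *) ((5 << 1) | 1).  */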
109 /* Count the number of non-jump active insns in BB. */
111 static int
112 count_bb_insns (bb)
113 basic_block bb;
115 int count = 0;
116 rtx insn = bb->head;
118 while (1)
120 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
121 count++;
123 if (insn == bb->end)
124 break;
125 insn = NEXT_INSN (insn);
128 return count;
131 /* Return the first non-jump active insn in the basic block. */
133 static rtx
134 first_active_insn (bb)
135 basic_block bb;
137 rtx insn = bb->head;
139 if (GET_CODE (insn) == CODE_LABEL)
141 if (insn == bb->end)
142 return NULL_RTX;
143 insn = NEXT_INSN (insn);
146 while (GET_CODE (insn) == NOTE)
148 if (insn == bb->end)
149 return NULL_RTX;
150 insn = NEXT_INSN (insn);
153 if (GET_CODE (insn) == JUMP_INSN)
154 return NULL_RTX;
156 return insn;
159 /* Return true if INSN is the last active non-jump insn in BB. */
161 static int
162 last_active_insn_p (bb, insn)
163 basic_block bb;
164 rtx insn;
168 if (insn == bb->end)
169 return TRUE;
170 insn = NEXT_INSN (insn);
172 while (GET_CODE (insn) == NOTE);
174 return GET_CODE (insn) == JUMP_INSN;
/* It is possible, especially when expanding multi-word arithmetic,
   for the expanders to have emitted jumps.  Search through the
   sequence and return TRUE if a jump exists so that we can abort
   the conversion.  */
182 static int
183 seq_contains_jump (insn)
184 rtx insn;
186 while (insn)
188 if (GET_CODE (insn) == JUMP_INSN)
189 return 1;
190 insn = NEXT_INSN (insn);
192 return 0;
195 /* Go through a bunch of insns, converting them to conditional
196 execution format if possible. Return TRUE if all of the non-note
197 insns were processed. */
199 static int
200 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
201 rtx start; /* first insn to look at */
202 rtx end; /* last insn to look at */
203 rtx test; /* conditional execution test */
204 rtx prob_val; /* probability of branch taken. */
int mod_ok;		/* true if the test may be modified by the last insn.  */
207 int must_be_last = FALSE;
208 rtx insn;
209 rtx pattern;
211 for (insn = start; ; insn = NEXT_INSN (insn))
213 if (GET_CODE (insn) == NOTE)
214 goto insn_done;
216 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
217 abort ();
219 /* Remove USE insns that get in the way. */
220 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
222 /* ??? Ug. Actually unlinking the thing is problematic,
223 given what we'd have to coordinate with our callers. */
224 PUT_CODE (insn, NOTE);
225 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
226 NOTE_SOURCE_FILE (insn) = 0;
227 goto insn_done;
230 /* Last insn wasn't last? */
231 if (must_be_last)
232 return FALSE;
234 if (modified_in_p (test, insn))
236 if (!mod_ok)
237 return FALSE;
238 must_be_last = TRUE;
241 /* Now build the conditional form of the instruction. */
242 pattern = PATTERN (insn);
244 /* If the machine needs to modify the insn being conditionally executed,
245 say for example to force a constant integer operand into a temp
246 register, do so here. */
247 #ifdef IFCVT_MODIFY_INSN
248 IFCVT_MODIFY_INSN (pattern, insn);
249 if (! pattern)
250 return FALSE;
251 #endif
253 validate_change (insn, &PATTERN (insn),
254 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
255 pattern), 1);
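/* E.g. a simple move (set (reg:SI 1) (reg:SI 2)) predicated on the
   test (ne (reg:CC 24) (const_int 0)) now reads
     (cond_exec (ne (reg:CC 24) (const_int 0))
                (set (reg:SI 1) (reg:SI 2))).  */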
257 if (GET_CODE (insn) == CALL_INSN && prob_val)
258 validate_change (insn, &REG_NOTES (insn),
259 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
260 REG_NOTES (insn)), 1);
262 insn_done:
263 if (insn == end)
264 break;
267 return TRUE;
270 /* Return the condition for a jump. Do not do any special processing. */
272 static rtx
273 cond_exec_get_condition (jump)
274 rtx jump;
276 rtx test_if, cond;
278 if (any_condjump_p (jump))
279 test_if = SET_SRC (pc_set (jump));
280 else
281 return NULL_RTX;
282 cond = XEXP (test_if, 0);
284 /* If this branches to JUMP_LABEL when the condition is false,
285 reverse the condition. */
286 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
287 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
288 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
289 GET_MODE (cond), XEXP (cond, 0),
290 XEXP (cond, 1));
292 return cond;
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  */
299 static int
300 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
301 basic_block test_bb; /* Basic block test is in */
302 basic_block then_bb; /* Basic block for THEN block */
303 basic_block else_bb; /* Basic block for ELSE block */
304 basic_block join_bb; /* Basic block the join label is in */
306 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
307 rtx then_start; /* first insn in THEN block */
308 rtx then_end; /* last insn + 1 in THEN block */
309 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
310 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
311 int max; /* max # of insns to convert. */
312 int then_mod_ok; /* whether conditional mods are ok in THEN */
313 rtx true_expr; /* test for else block insns */
314 rtx false_expr; /* test for then block insns */
315 rtx true_prob_val; /* probability of else block */
316 rtx false_prob_val; /* probability of then block */
317 int n_insns;
319 /* Find the conditional jump to the ELSE or JOIN part, and isolate
320 the test. */
321 test_expr = cond_exec_get_condition (test_bb->end);
322 if (! test_expr)
323 return FALSE;
325 /* If the conditional jump is more than just a conditional jump,
326 then we can not do conditional execution conversion on this block. */
327 if (!onlyjump_p (test_bb->end))
328 return FALSE;
330 /* Collect the bounds of where we're to search. */
332 then_start = then_bb->head;
333 then_end = then_bb->end;
335 /* Skip a label heading THEN block. */
336 if (GET_CODE (then_start) == CODE_LABEL)
337 then_start = NEXT_INSN (then_start);
339 /* Skip a (use (const_int 0)) or branch as the final insn. */
340 if (GET_CODE (then_end) == INSN
341 && GET_CODE (PATTERN (then_end)) == USE
342 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
343 then_end = PREV_INSN (then_end);
344 else if (GET_CODE (then_end) == JUMP_INSN)
345 then_end = PREV_INSN (then_end);
347 if (else_bb)
349 /* Skip the ELSE block's label. */
350 else_start = NEXT_INSN (else_bb->head);
351 else_end = else_bb->end;
353 /* Skip a (use (const_int 0)) or branch as the final insn. */
354 if (GET_CODE (else_end) == INSN
355 && GET_CODE (PATTERN (else_end)) == USE
356 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
357 else_end = PREV_INSN (else_end);
358 else if (GET_CODE (else_end) == JUMP_INSN)
359 else_end = PREV_INSN (else_end);
362 /* How many instructions should we convert in total? */
363 n_insns = 0;
364 if (else_bb)
366 max = 2 * MAX_CONDITIONAL_EXECUTE;
367 n_insns = count_bb_insns (else_bb);
369 else
370 max = MAX_CONDITIONAL_EXECUTE;
371 n_insns += count_bb_insns (then_bb);
372 if (n_insns > max)
373 return FALSE;
375 /* Map test_expr/test_jump into the appropriate MD tests to use on
376 the conditionally executed code. */
378 true_expr = test_expr;
379 false_expr = gen_rtx_fmt_ee (reverse_condition (GET_CODE (true_expr)),
380 GET_MODE (true_expr), XEXP (true_expr, 0),
381 XEXP (true_expr, 1));
383 #ifdef IFCVT_MODIFY_TESTS
384 /* If the machine description needs to modify the tests, such as setting a
385 conditional execution register from a comparison, it can do so here. */
386 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
387 join_bb);
389 /* See if the conversion failed */
390 if (!true_expr || !false_expr)
391 goto fail;
392 #endif
394 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
395 if (true_prob_val)
397 true_prob_val = XEXP (true_prob_val, 0);
398 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
400 else
401 false_prob_val = NULL_RTX;
/* For IF-THEN-ELSE blocks, we don't allow modifications of the test
   in the THEN block.  */
405 then_mod_ok = (else_bb == NULL_BLOCK);
407 /* Go through the THEN and ELSE blocks converting the insns if possible
408 to conditional execution. */
410 if (then_end
411 && ! cond_exec_process_insns (then_start, then_end,
412 false_expr, false_prob_val, then_mod_ok))
413 goto fail;
415 if (else_bb
416 && ! cond_exec_process_insns (else_start, else_end,
417 true_expr, true_prob_val, TRUE))
418 goto fail;
420 if (! apply_change_group ())
421 return FALSE;
423 #ifdef IFCVT_MODIFY_FINAL
424 /* Do any machine dependent final modifications */
425 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
426 #endif
428 /* Conversion succeeded. */
429 if (rtl_dump_file)
430 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
431 n_insns, (n_insns == 1) ? " was" : "s were");
433 /* Merge the blocks! */
434 merge_if_block (test_bb, then_bb, else_bb, join_bb);
435 return TRUE;
437 fail:
438 #ifdef IFCVT_MODIFY_CANCEL
439 /* Cancel any machine dependent changes. */
440 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
441 #endif
443 cancel_changes (0);
444 return FALSE;
447 /* Used by noce_process_if_block to communicate with its subroutines.
449 The subroutines know that A and B may be evaluated freely. They
450 know that X is a register. They should insert new instructions
451 before cond_earliest. */
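/* INSN_A and INSN_B are the single-set insns taken from the THEN and
   ELSE blocks, X their common destination, and A and B the respective
   sources.  JUMP is the conditional jump ending the test block, COND
   its condition, and COND_EARLIEST the earliest insn involved in
   computing that condition.  */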
453 struct noce_if_info
455 rtx insn_a, insn_b;
456 rtx x, a, b;
457 rtx jump, cond, cond_earliest;
460 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
461 rtx, int, int));
462 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
463 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
464 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
465 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
466 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
467 rtx, enum rtx_code, rtx,
468 rtx, rtx, rtx));
469 static int noce_try_cmove PARAMS ((struct noce_if_info *));
470 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
472 /* Helper function for noce_try_store_flag*. */
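/* Emit a store-flag sequence setting X from IF_INFO->cond (reversed
   when REVERSEP is set).  NORMALIZE is passed on to emit_store_flag:
   0 accepts whatever STORE_FLAG_VALUE the target produces, while 1 or
   -1 request a 0/1 or 0/-1 result.  Returns the result rtx, or
   NULL_RTX on failure.  */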
474 static rtx
475 noce_emit_store_flag (if_info, x, reversep, normalize)
476 struct noce_if_info *if_info;
477 rtx x;
478 int reversep, normalize;
480 rtx cond = if_info->cond;
481 int cond_complex;
482 enum rtx_code code;
484 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
485 || ! general_operand (XEXP (cond, 1), VOIDmode));
487 /* If earliest == jump, or when the condition is complex, try to
488 build the store_flag insn directly. */
490 if (cond_complex)
491 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
493 if ((if_info->cond_earliest == if_info->jump || cond_complex)
494 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
496 rtx tmp;
498 code = GET_CODE (cond);
499 if (reversep)
500 code = reverse_condition (code);
502 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
503 XEXP (cond, 1));
504 tmp = gen_rtx_SET (VOIDmode, x, tmp);
506 start_sequence ();
507 tmp = emit_insn (tmp);
509 if (recog_memoized (tmp) >= 0)
511 tmp = get_insns ();
512 end_sequence ();
513 emit_insns (tmp);
515 if_info->cond_earliest = if_info->jump;
517 return x;
520 end_sequence ();
523 /* Don't even try if the comparison operands are weird. */
524 if (cond_complex)
525 return NULL_RTX;
527 code = GET_CODE (cond);
528 if (reversep)
529 code = reverse_condition (code);
531 return emit_store_flag (x, code, XEXP (cond, 0),
532 XEXP (cond, 1), VOIDmode,
533 (code == LTU || code == LEU
534 || code == GEU || code == GTU), normalize);
/* Emit an instruction to move Y into X, handling a STRICT_LOW_PART
   destination specially.  */
538 static void
539 noce_emit_move_insn (x, y)
540 rtx x, y;
542 enum machine_mode outmode, inmode;
543 rtx outer, inner;
544 int bitpos;
546 if (GET_CODE (x) != STRICT_LOW_PART)
548 emit_move_insn (x, y);
549 return;
552 outer = XEXP (x, 0);
553 inner = XEXP (outer, 0);
554 outmode = GET_MODE (outer);
555 inmode = GET_MODE (inner);
556 bitpos = SUBREG_WORD (outer) * BITS_PER_WORD;
557 if (BYTES_BIG_ENDIAN)
558 bitpos += (GET_MODE_BITSIZE (inmode) - GET_MODE_BITSIZE (outmode))
559 % BITS_PER_WORD;
560 store_bit_field (inner, GET_MODE_BITSIZE (outmode),
561 bitpos, outmode, y, GET_MODE_BITSIZE (inmode),
562 GET_MODE_BITSIZE (inmode));
565 /* Convert "if (test) x = 1; else x = 0".
567 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
568 tried in noce_try_store_flag_constants after noce_try_cmove has had
569 a go at the conversion. */
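/* E.g. when STORE_FLAG_VALUE == 1, "if (y < z) x = 1; else x = 0"
   simply becomes "x = (y < z)" via a single store-flag sequence.  */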
571 static int
572 noce_try_store_flag (if_info)
573 struct noce_if_info *if_info;
575 int reversep;
576 rtx target, seq;
578 if (GET_CODE (if_info->b) == CONST_INT
579 && INTVAL (if_info->b) == STORE_FLAG_VALUE
580 && if_info->a == const0_rtx)
581 reversep = 0;
582 else if (if_info->b == const0_rtx
583 && GET_CODE (if_info->a) == CONST_INT
584 && INTVAL (if_info->a) == STORE_FLAG_VALUE
585 && can_reverse_comparison_p (if_info->cond, if_info->jump))
586 reversep = 1;
587 else
588 return FALSE;
590 start_sequence ();
592 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
593 if (target)
595 if (target != if_info->x)
596 noce_emit_move_insn (if_info->x, target);
598 seq = get_insns ();
599 end_sequence ();
600 emit_insns_before (seq, if_info->cond_earliest);
602 return TRUE;
604 else
606 end_sequence ();
607 return FALSE;
611 /* Convert "if (test) x = a; else x = b", for A and B constant. */
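/* E.g. with STORE_FLAG_VALUE == 1, "if (test) x = 4; else x = 3" gives
   itrue - ifalse == -1 == -STORE_FLAG_VALUE, so below we emit
   "x = 4 - (test == 0)".  */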
613 static int
614 noce_try_store_flag_constants (if_info)
615 struct noce_if_info *if_info;
617 rtx target, seq;
618 int reversep;
619 HOST_WIDE_INT itrue, ifalse, diff, tmp;
620 int normalize, can_reverse;
622 if (! no_new_pseudos
623 && GET_CODE (if_info->a) == CONST_INT
624 && GET_CODE (if_info->b) == CONST_INT)
626 ifalse = INTVAL (if_info->a);
627 itrue = INTVAL (if_info->b);
628 diff = itrue - ifalse;
630 can_reverse = can_reverse_comparison_p (if_info->cond, if_info->jump);
632 reversep = 0;
633 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
634 normalize = 0;
635 else if (ifalse == 0 && exact_log2 (itrue) >= 0
636 && (STORE_FLAG_VALUE == 1
637 || BRANCH_COST >= 2))
638 normalize = 1;
639 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
640 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
641 normalize = 1, reversep = 1;
642 else if (itrue == -1
643 && (STORE_FLAG_VALUE == -1
644 || BRANCH_COST >= 2))
645 normalize = -1;
646 else if (ifalse == -1 && can_reverse
647 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
648 normalize = -1, reversep = 1;
649 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
650 || BRANCH_COST >= 3)
651 normalize = -1;
652 else
653 return FALSE;
655 if (reversep)
657 tmp = itrue; itrue = ifalse; ifalse = tmp;
658 diff = -diff;
661 start_sequence ();
662 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
663 if (! target)
665 end_sequence ();
666 return FALSE;
669 /* if (test) x = 3; else x = 4;
670 => x = 3 + (test == 0); */
671 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
673 target = expand_binop (GET_MODE (if_info->x),
674 (diff == STORE_FLAG_VALUE
675 ? add_optab : sub_optab),
676 GEN_INT (ifalse), target, if_info->x, 0,
677 OPTAB_WIDEN);
680 /* if (test) x = 8; else x = 0;
681 => x = (test != 0) << 3; */
682 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
684 target = expand_binop (GET_MODE (if_info->x), ashl_optab,
685 target, GEN_INT (tmp), if_info->x, 0,
686 OPTAB_WIDEN);
689 /* if (test) x = -1; else x = b;
690 => x = -(test != 0) | b; */
691 else if (itrue == -1)
693 target = expand_binop (GET_MODE (if_info->x), ior_optab,
694 target, GEN_INT (ifalse), if_info->x, 0,
695 OPTAB_WIDEN);
698 /* if (test) x = a; else x = b;
699 => x = (-(test != 0) & (b - a)) + a; */
700 else
702 target = expand_binop (GET_MODE (if_info->x), and_optab,
703 target, GEN_INT (diff), if_info->x, 0,
704 OPTAB_WIDEN);
705 if (target)
706 target = expand_binop (GET_MODE (if_info->x), add_optab,
707 target, GEN_INT (ifalse), if_info->x, 0,
708 OPTAB_WIDEN);
711 if (! target)
713 end_sequence ();
714 return FALSE;
717 if (target != if_info->x)
718 noce_emit_move_insn (if_info->x, target);
720 seq = get_insns ();
721 end_sequence ();
723 if (seq_contains_jump (seq))
724 return FALSE;
726 emit_insns_before (seq, if_info->cond_earliest);
728 return TRUE;
731 return FALSE;
734 /* Convert "if (test) foo++" into "foo += (test != 0)", and
735 similarly for "foo--". */
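/* E.g. with STORE_FLAG_VALUE == 1, "if (y < z) foo++" becomes
   "foo += (y < z)", trading the branch for a store-flag and an add.  */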
737 static int
738 noce_try_store_flag_inc (if_info)
739 struct noce_if_info *if_info;
741 rtx target, seq;
742 int subtract, normalize;
744 if (! no_new_pseudos
745 && (BRANCH_COST >= 2
746 || HAVE_incscc
747 || HAVE_decscc)
748 /* Should be no `else' case to worry about. */
749 && if_info->b == if_info->x
750 && GET_CODE (if_info->a) == PLUS
751 && (XEXP (if_info->a, 1) == const1_rtx
752 || XEXP (if_info->a, 1) == constm1_rtx)
753 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
754 && can_reverse_comparison_p (if_info->cond, if_info->jump))
756 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
757 subtract = 0, normalize = 0;
758 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
759 subtract = 1, normalize = 0;
760 else
761 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
763 start_sequence ();
765 target = noce_emit_store_flag (if_info,
766 gen_reg_rtx (GET_MODE (if_info->x)),
767 1, normalize);
769 if (target)
770 target = expand_binop (GET_MODE (if_info->x),
771 subtract ? sub_optab : add_optab,
772 if_info->x, target, if_info->x, 0, OPTAB_WIDEN);
773 if (target)
775 if (target != if_info->x)
776 noce_emit_move_insn (if_info->x, target);
778 seq = get_insns ();
779 end_sequence ();
781 if (seq_contains_jump (seq))
782 return FALSE;
784 emit_insns_before (seq, if_info->cond_earliest);
786 return TRUE;
789 end_sequence ();
792 return FALSE;
795 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
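/* The mask -(test == 0) is all ones when TEST is false, leaving X
   unchanged on that path, and zero when TEST is true, clearing X.  */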
797 static int
798 noce_try_store_flag_mask (if_info)
799 struct noce_if_info *if_info;
801 rtx target, seq;
802 int reversep;
804 reversep = 0;
805 if (! no_new_pseudos
806 && (BRANCH_COST >= 2
807 || STORE_FLAG_VALUE == -1)
808 && ((if_info->a == const0_rtx
809 && rtx_equal_p (if_info->b, if_info->x))
810 || ((reversep = can_reverse_comparison_p (if_info->cond,
811 if_info->jump))
812 && if_info->b == const0_rtx
813 && rtx_equal_p (if_info->a, if_info->x))))
815 start_sequence ();
816 target = noce_emit_store_flag (if_info,
817 gen_reg_rtx (GET_MODE (if_info->x)),
818 reversep, -1);
819 if (target)
820 target = expand_binop (GET_MODE (if_info->x), and_optab,
821 if_info->x, target, if_info->x, 0,
822 OPTAB_WIDEN);
824 if (target)
826 if (target != if_info->x)
827 noce_emit_move_insn (if_info->x, target);
829 seq = get_insns ();
830 end_sequence ();
832 if (seq_contains_jump (seq))
833 return FALSE;
835 emit_insns_before (seq, if_info->cond_earliest);
837 return TRUE;
840 end_sequence ();
843 return FALSE;
846 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
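/* Emit a conditional move computing
     X = (CMP_A CODE CMP_B) ? VTRUE : VFALSE
   and return the result rtx, or NULL_RTX if no suitable conditional
   move pattern could be generated.  */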
848 static rtx
849 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
850 struct noce_if_info *if_info;
851 rtx x, cmp_a, cmp_b, vfalse, vtrue;
852 enum rtx_code code;
854 /* If earliest == jump, try to build the cmove insn directly.
855 This is helpful when combine has created some complex condition
856 (like for alpha's cmovlbs) that we can't hope to regenerate
857 through the normal interface. */
859 if (if_info->cond_earliest == if_info->jump)
861 rtx tmp;
863 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
864 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
865 tmp = gen_rtx_SET (VOIDmode, x, tmp);
867 start_sequence ();
868 tmp = emit_insn (tmp);
870 if (recog_memoized (tmp) >= 0)
872 tmp = get_insns ();
873 end_sequence ();
874 emit_insns (tmp);
876 return x;
879 end_sequence ();
882 /* Don't even try if the comparison operands are weird. */
883 if (! general_operand (cmp_a, GET_MODE (cmp_a))
884 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
885 return NULL_RTX;
887 #if HAVE_conditional_move
888 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
889 vtrue, vfalse, GET_MODE (x),
890 (code == LTU || code == GEU
891 || code == LEU || code == GTU));
892 #else
/* We'll never get here, as noce_process_if_block doesn't call the
   functions involved.  Ifdef code, however, should be discouraged
   because it leads to typos in the code not selected.  But without
   HAVE_conditional_move, emit_conditional_move won't exist either,
   so the #if is unavoidable here.  */
897 return NULL_RTX;
898 #endif
901 /* Try only simple constants and registers here. More complex cases
902 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
903 has had a go at it. */
905 static int
906 noce_try_cmove (if_info)
907 struct noce_if_info *if_info;
909 enum rtx_code code;
910 rtx target, seq;
912 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
913 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
915 start_sequence ();
917 code = GET_CODE (if_info->cond);
918 target = noce_emit_cmove (if_info, if_info->x, code,
919 XEXP (if_info->cond, 0),
920 XEXP (if_info->cond, 1),
921 if_info->a, if_info->b);
923 if (target)
925 if (target != if_info->x)
926 noce_emit_move_insn (if_info->x, target);
928 seq = get_insns ();
929 end_sequence ();
930 emit_insns_before (seq, if_info->cond_earliest);
931 return TRUE;
933 else
935 end_sequence ();
936 return FALSE;
940 return FALSE;
943 /* Try more complex cases involving conditional_move. */
945 static int
946 noce_try_cmove_arith (if_info)
947 struct noce_if_info *if_info;
949 rtx a = if_info->a;
950 rtx b = if_info->b;
951 rtx x = if_info->x;
952 rtx insn_a, insn_b;
953 rtx tmp, target;
954 int is_mem = 0;
955 enum rtx_code code;
957 /* A conditional move from two memory sources is equivalent to a
958 conditional on their addresses followed by a load. Don't do this
959 early because it'll screw alias analysis. Note that we've
960 already checked for no side effects. */
961 if (! no_new_pseudos && cse_not_expected
962 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
963 && BRANCH_COST >= 5)
965 a = XEXP (a, 0);
966 b = XEXP (b, 0);
967 x = gen_reg_rtx (Pmode);
968 is_mem = 1;
971 /* ??? We could handle this if we knew that a load from A or B could
972 not fault. This is also true if we've already loaded
973 from the address along the path from ENTRY. */
974 else if (may_trap_p (a) || may_trap_p (b))
975 return FALSE;
977 /* if (test) x = a + b; else x = c - d;
978 => y = a + b;
979 x = c - d;
980 if (test)
981 x = y;
984 code = GET_CODE (if_info->cond);
985 insn_a = if_info->insn_a;
986 insn_b = if_info->insn_b;
988 /* Possibly rearrange operands to make things come out more natural. */
989 if (can_reverse_comparison_p (if_info->cond, if_info->jump))
991 int reversep = 0;
992 if (rtx_equal_p (b, x))
993 reversep = 1;
994 else if (general_operand (b, GET_MODE (b)))
995 reversep = 1;
997 if (reversep)
999 code = reverse_condition (code);
1000 tmp = a, a = b, b = tmp;
1001 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1005 start_sequence ();
1007 /* If either operand is complex, load it into a register first.
1008 The best way to do this is to copy the original insn. In this
1009 way we preserve any clobbers etc that the insn may have had.
1010 This is of course not possible in the IS_MEM case. */
1011 if (! general_operand (a, GET_MODE (a)))
1013 rtx set;
1015 if (no_new_pseudos)
1016 goto end_seq_and_fail;
1018 if (is_mem)
1020 tmp = gen_reg_rtx (GET_MODE (a));
1021 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1023 else if (! insn_a)
1024 goto end_seq_and_fail;
1025 else
1027 a = gen_reg_rtx (GET_MODE (a));
1028 tmp = copy_rtx (insn_a);
1029 set = single_set (tmp);
1030 SET_DEST (set) = a;
1031 tmp = emit_insn (PATTERN (tmp));
1033 if (recog_memoized (tmp) < 0)
1034 goto end_seq_and_fail;
1036 if (! general_operand (b, GET_MODE (b)))
1038 rtx set;
1040 if (no_new_pseudos)
1041 goto end_seq_and_fail;
1043 if (is_mem)
1045 tmp = gen_reg_rtx (GET_MODE (b));
1046 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1048 else if (! insn_b)
1049 goto end_seq_and_fail;
1050 else
1052 b = gen_reg_rtx (GET_MODE (b));
1053 tmp = copy_rtx (insn_b);
1054 set = single_set (tmp);
1055 SET_DEST (set) = b;
1056 tmp = emit_insn (PATTERN (tmp));
1058 if (recog_memoized (tmp) < 0)
1059 goto end_seq_and_fail;
1062 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1063 XEXP (if_info->cond, 1), a, b);
1065 if (! target)
1066 goto end_seq_and_fail;
/* If we're handling memory operands from above, emit the load now.  */
1069 if (is_mem)
1071 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1073 /* Copy over flags as appropriate. */
1074 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1075 MEM_VOLATILE_P (tmp) = 1;
1076 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1077 MEM_IN_STRUCT_P (tmp) = 1;
1078 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1079 MEM_SCALAR_P (tmp) = 1;
1080 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1081 MEM_ALIAS_SET (tmp) = MEM_ALIAS_SET (if_info->a);
1083 noce_emit_move_insn (if_info->x, tmp);
1085 else if (target != x)
1086 noce_emit_move_insn (x, target);
1088 tmp = get_insns ();
1089 end_sequence ();
1090 emit_insns_before (tmp, if_info->cond_earliest);
1091 return TRUE;
1093 end_seq_and_fail:
1094 end_sequence ();
1095 return FALSE;
1098 /* Look for the condition for the jump first. We'd prefer to avoid
1099 get_condition if we can -- it tries to look back for the contents
1100 of an original compare. On targets that use normal integers for
1101 comparisons, e.g. alpha, this is wasteful. */
1103 static rtx
1104 noce_get_condition (jump, earliest)
1105 rtx jump;
1106 rtx *earliest;
1108 rtx cond;
1109 rtx set;
1111 /* If the condition variable is a register and is MODE_INT, accept it.
1112 Otherwise, fall back on get_condition. */
1114 if (! any_condjump_p (jump))
1115 return NULL_RTX;
1117 set = pc_set (jump);
1119 cond = XEXP (SET_SRC (set), 0);
1120 if (GET_CODE (XEXP (cond, 0)) == REG
1121 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1123 *earliest = jump;
1125 /* If this branches to JUMP_LABEL when the condition is false,
1126 reverse the condition. */
1127 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1128 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1129 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1130 GET_MODE (cond), XEXP (cond, 0),
1131 XEXP (cond, 1));
1133 else
1134 cond = get_condition (jump, earliest);
1136 return cond;
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   without using conditional execution.  Return TRUE if we were
   successful at converting the block.  */
1143 static int
1144 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1145 basic_block test_bb; /* Basic block test is in */
1146 basic_block then_bb; /* Basic block for THEN block */
1147 basic_block else_bb; /* Basic block for ELSE block */
1148 basic_block join_bb; /* Basic block the join label is in */
/* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

   The latter patterns require jumps to be more expensive.

   ??? For future expansion, look for multiple X in such patterns.  */
1160 struct noce_if_info if_info;
1161 rtx insn_a, insn_b;
1162 rtx set_a, set_b;
1163 rtx orig_x, x, a, b;
1164 rtx jump, cond, insn;
1166 /* If this is not a standard conditional jump, we can't parse it. */
1167 jump = test_bb->end;
1168 cond = noce_get_condition (jump, &if_info.cond_earliest);
1169 if (! cond)
1170 return FALSE;
1172 /* If the conditional jump is more than just a conditional jump,
1173 then we can not do if-conversion on this block. */
1174 if (! onlyjump_p (jump))
1175 return FALSE;
1177 /* We must be comparing objects whose modes imply the size. */
1178 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1179 return FALSE;
1181 /* Look for one of the potential sets. */
1182 insn_a = first_active_insn (then_bb);
1183 if (! insn_a
1184 || ! last_active_insn_p (then_bb, insn_a)
1185 || (set_a = single_set (insn_a)) == NULL_RTX)
1186 return FALSE;
1188 x = SET_DEST (set_a);
1189 a = SET_SRC (set_a);
1191 /* Look for the other potential set. Make sure we've got equivalent
1192 destinations. */
1193 /* ??? This is overconservative. Storing to two different mems is
1194 as easy as conditionally computing the address. Storing to a
1195 single mem merely requires a scratch memory to use as one of the
1196 destination addresses; often the memory immediately below the
1197 stack pointer is available for this. */
1198 set_b = NULL_RTX;
1199 if (else_bb)
1201 insn_b = first_active_insn (else_bb);
1202 if (! insn_b
1203 || ! last_active_insn_p (else_bb, insn_b)
1204 || (set_b = single_set (insn_b)) == NULL_RTX
1205 || ! rtx_equal_p (x, SET_DEST (set_b)))
1206 return FALSE;
1208 else
1210 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1211 if (! insn_b
1212 || GET_CODE (insn_b) != INSN
1213 || (set_b = single_set (insn_b)) == NULL_RTX
1214 || ! rtx_equal_p (x, SET_DEST (set_b))
1215 || reg_mentioned_p (x, cond)
1216 || reg_mentioned_p (x, a)
1217 || reg_mentioned_p (x, SET_SRC (set_b)))
1218 insn_b = set_b = NULL_RTX;
1220 b = (set_b ? SET_SRC (set_b) : x);
1222 /* X may not be mentioned in the range (cond_earliest, jump]. */
1223 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1224 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1225 return FALSE;
1227 /* A and B may not be modified in the range [cond_earliest, jump). */
1228 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1229 if (INSN_P (insn)
1230 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1231 return FALSE;
1233 /* Only operate on register destinations, and even then avoid extending
1234 the lifetime of hard registers on small register class machines. */
1235 orig_x = x;
1236 if (GET_CODE (x) != REG
1237 || (SMALL_REGISTER_CLASSES
1238 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1240 if (no_new_pseudos)
1241 return FALSE;
1242 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1243 ? XEXP (x, 0) : x));
1246 /* Don't operate on sources that may trap or are volatile. */
1247 if (side_effects_p (a) || side_effects_p (b)
1248 || (GET_CODE (a) != MEM && may_trap_p (a))
1249 || (GET_CODE (b) != MEM && may_trap_p (b)))
1250 return FALSE;
1252 /* Set up the info block for our subroutines. */
1253 if_info.cond = cond;
1254 if_info.jump = jump;
1255 if_info.insn_a = insn_a;
1256 if_info.insn_b = insn_b;
1257 if_info.x = x;
1258 if_info.a = a;
1259 if_info.b = b;
1261 /* Try optimizations in some approximation of a useful order. */
1262 /* ??? Should first look to see if X is live incoming at all. If it
1263 isn't, we don't need anything but an unconditional set. */
1265 /* Look and see if A and B are really the same. Avoid creating silly
1266 cmove constructs that no one will fix up later. */
1267 if (rtx_equal_p (a, b))
1269 /* If we have an INSN_B, we don't have to create any new rtl. Just
1270 move the instruction that we already have. If we don't have an
1271 INSN_B, that means that A == X, and we've got a noop move. In
1272 that case don't do anything and let the code below delete INSN_A. */
1273 if (insn_b && else_bb)
1275 rtx note;
1277 if (else_bb && insn_b == else_bb->end)
1278 else_bb->end = PREV_INSN (insn_b);
1279 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1281 /* If there was a REG_EQUAL note, delete it since it may have been
1282 true due to this insn being after a jump. */
1283 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1284 remove_note (insn_b, note);
1286 insn_b = NULL_RTX;
1288 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1289 x must be executed twice. */
1290 else if (insn_b && side_effects_p (orig_x))
1291 return FALSE;
1293 x = orig_x;
1294 goto success;
1297 if (noce_try_store_flag (&if_info))
1298 goto success;
1299 if (HAVE_conditional_move
1300 && noce_try_cmove (&if_info))
1301 goto success;
1302 if (! HAVE_conditional_execution)
1304 if (noce_try_store_flag_constants (&if_info))
1305 goto success;
1306 if (noce_try_store_flag_inc (&if_info))
1307 goto success;
1308 if (noce_try_store_flag_mask (&if_info))
1309 goto success;
1310 if (HAVE_conditional_move
1311 && noce_try_cmove_arith (&if_info))
1312 goto success;
1315 return FALSE;
1317 success:
1318 /* The original sets may now be killed. */
1319 if (insn_a == then_bb->end)
1320 then_bb->end = PREV_INSN (insn_a);
1321 flow_delete_insn (insn_a);
/* Several special cases here: First, we may have reused insn_b above,
   in which case insn_b is now NULL.  Second, we want to delete insn_b
   if it came from the ELSE block, because it follows the now correct
   write that appears in the TEST block.  However, if we got insn_b from
   the TEST block, it may in fact be loading data needed for the
   comparison.  We'll let life_analysis remove the insn if it's really
   dead.  */
1329 if (insn_b && else_bb)
1331 if (insn_b == else_bb->end)
1332 else_bb->end = PREV_INSN (insn_b);
1333 flow_delete_insn (insn_b);
1336 /* The new insns will have been inserted before cond_earliest. We should
1337 be able to remove the jump with impunity, but the condition itself may
1338 have been modified by gcse to be shared across basic blocks. */
1339 test_bb->end = PREV_INSN (jump);
1340 flow_delete_insn (jump);
1342 /* If we used a temporary, fix it up now. */
1343 if (orig_x != x)
1345 start_sequence ();
1346 noce_emit_move_insn (orig_x, x);
1347 insn_b = gen_sequence ();
1348 end_sequence ();
1350 test_bb->end = emit_insn_after (insn_b, test_bb->end);
1353 /* Merge the blocks! */
1354 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1356 return TRUE;
1359 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1360 straight line code. Return true if successful. */
1362 static int
1363 process_if_block (test_bb, then_bb, else_bb, join_bb)
1364 basic_block test_bb; /* Basic block test is in */
1365 basic_block then_bb; /* Basic block for THEN block */
1366 basic_block else_bb; /* Basic block for ELSE block */
1367 basic_block join_bb; /* Basic block the join label is in */
1369 if (! reload_completed
1370 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1371 return TRUE;
1373 if (HAVE_conditional_execution
1374 && reload_completed
1375 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1376 return TRUE;
1378 return FALSE;
1381 /* Merge the blocks and mark for local life update. */
1383 static void
1384 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1385 basic_block test_bb; /* Basic block test is in */
1386 basic_block then_bb; /* Basic block for THEN block */
1387 basic_block else_bb; /* Basic block for ELSE block */
1388 basic_block join_bb; /* Basic block the join label is in */
1390 basic_block combo_bb;
1392 /* All block merging is done into the lower block numbers. */
1394 combo_bb = test_bb;
1396 /* First merge TEST block into THEN block. This is a no-brainer since
1397 the THEN block did not have a code label to begin with. */
1399 if (combo_bb->global_live_at_end)
1400 COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
1401 merge_blocks_nomove (combo_bb, then_bb);
1402 num_removed_blocks++;
1404 /* The ELSE block, if it existed, had a label. That label count
1405 will almost always be zero, but odd things can happen when labels
1406 get their addresses taken. */
1407 if (else_bb)
1409 merge_blocks_nomove (combo_bb, else_bb);
1410 num_removed_blocks++;
1413 /* If there was no join block reported, that means it was not adjacent
1414 to the others, and so we cannot merge them. */
1416 if (! join_bb)
1418 /* The outgoing edge for the current COMBO block should already
1419 be correct. Verify this. */
1420 if (combo_bb->succ == NULL_EDGE)
1421 abort ();
/* There should still be a branch at the end of the THEN or ELSE
   blocks taking us to our final destination.  */
1425 if (! simplejump_p (combo_bb->end)
1426 && ! returnjump_p (combo_bb->end))
1427 abort ();
1430 /* The JOIN block may have had quite a number of other predecessors too.
1431 Since we've already merged the TEST, THEN and ELSE blocks, we should
1432 have only one remaining edge from our if-then-else diamond. If there
1433 is more than one remaining edge, it must come from elsewhere. There
1434 may be zero incoming edges if the THEN block didn't actually join
1435 back up (as with a call to abort). */
1436 else if (join_bb->pred == NULL || join_bb->pred->pred_next == NULL)
1438 /* We can merge the JOIN. */
1439 if (combo_bb->global_live_at_end)
1440 COPY_REG_SET (combo_bb->global_live_at_end,
1441 join_bb->global_live_at_end);
1442 merge_blocks_nomove (combo_bb, join_bb);
1443 num_removed_blocks++;
1445 else
1447 /* We cannot merge the JOIN. */
1449 /* The outgoing edge for the current COMBO block should already
1450 be correct. Verify this. */
1451 if (combo_bb->succ->succ_next != NULL_EDGE
1452 || combo_bb->succ->dest != join_bb)
1453 abort ();
1455 /* Remove the jump and cruft from the end of the COMBO block. */
1456 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1459 /* Make sure we update life info properly. */
1460 SET_UPDATE_LIFE (combo_bb);
1462 num_updated_if_blocks++;
1465 /* Find a block ending in a simple IF condition. Return TRUE if
1466 we were able to transform it in some way. */
1468 static int
1469 find_if_header (test_bb)
1470 basic_block test_bb;
1472 edge then_edge;
1473 edge else_edge;
1475 /* The kind of block we're looking for has exactly two successors. */
1476 if ((then_edge = test_bb->succ) == NULL_EDGE
1477 || (else_edge = then_edge->succ_next) == NULL_EDGE
1478 || else_edge->succ_next != NULL_EDGE)
1479 return FALSE;
1481 /* Neither edge should be abnormal. */
1482 if ((then_edge->flags & EDGE_COMPLEX)
1483 || (else_edge->flags & EDGE_COMPLEX))
1484 return FALSE;
1486 /* The THEN edge is canonically the one that falls through. */
1487 if (then_edge->flags & EDGE_FALLTHRU)
1489 else if (else_edge->flags & EDGE_FALLTHRU)
1491 edge e = else_edge;
1492 else_edge = then_edge;
1493 then_edge = e;
1495 else
1496 /* Otherwise this must be a multiway branch of some sort. */
1497 return FALSE;
1499 if (find_if_block (test_bb, then_edge, else_edge))
1500 goto success;
1501 if (post_dominators
1502 && (! HAVE_conditional_execution || reload_completed))
1504 if (find_if_case_1 (test_bb, then_edge, else_edge))
1505 goto success;
1506 if (find_if_case_2 (test_bb, then_edge, else_edge))
1507 goto success;
1510 return FALSE;
1512 success:
1513 if (rtl_dump_file)
1514 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1515 return TRUE;
/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
   block.  If so, we'll try to convert the insns to not require the branch.
   Return TRUE if we were successful at converting the block.  */
1522 static int
1523 find_if_block (test_bb, then_edge, else_edge)
1524 basic_block test_bb;
1525 edge then_edge, else_edge;
1527 basic_block then_bb = then_edge->dest;
1528 basic_block else_bb = else_edge->dest;
1529 basic_block join_bb = NULL_BLOCK;
1530 edge then_succ = then_bb->succ;
1531 edge else_succ = else_bb->succ;
1532 int next_index;
1534 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1535 if (then_bb->pred->pred_next != NULL_EDGE)
1536 return FALSE;
1538 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1539 if (then_succ != NULL_EDGE
1540 && (then_succ->succ_next != NULL_EDGE
1541 || (then_succ->flags & EDGE_COMPLEX)))
1542 return FALSE;
1544 /* If the THEN block has no successors, conditional execution can still
1545 make a conditional call. Don't do this unless the ELSE block has
1546 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1547 Check for the last insn of the THEN block being an indirect jump, which
1548 is listed as not having any successors, but confuses the rest of the CE
1549 code processing. XXX we should fix this in the future. */
1550 if (then_succ == NULL)
1552 if (else_bb->pred->pred_next == NULL_EDGE)
1554 rtx last_insn = then_bb->end;
1556 while (last_insn
1557 && GET_CODE (last_insn) == NOTE
1558 && last_insn != then_bb->head)
1559 last_insn = PREV_INSN (last_insn);
1561 if (last_insn
1562 && GET_CODE (last_insn) == JUMP_INSN
1563 && ! simplejump_p (last_insn))
1564 return FALSE;
1566 join_bb = else_bb;
1567 else_bb = NULL_BLOCK;
1569 else
1570 return FALSE;
1573 /* If the THEN block's successor is the other edge out of the TEST block,
1574 then we have an IF-THEN combo without an ELSE. */
1575 else if (then_succ->dest == else_bb)
1577 join_bb = else_bb;
1578 else_bb = NULL_BLOCK;
1581 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
1582 has exactly one predecessor and one successor, and the outgoing edge
1583 is not complex, then we have an IF-THEN-ELSE combo. */
1584 else if (else_succ != NULL_EDGE
1585 && then_succ->dest == else_succ->dest
1586 && else_bb->pred->pred_next == NULL_EDGE
1587 && else_succ->succ_next == NULL_EDGE
1588 && ! (else_succ->flags & EDGE_COMPLEX))
1589 join_bb = else_succ->dest;
1591 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
1592 else
1593 return FALSE;
1595 num_possible_if_blocks++;
1597 if (rtl_dump_file)
1599 if (else_bb)
1600 fprintf (rtl_dump_file,
1601 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
1602 test_bb->index, then_bb->index, else_bb->index,
1603 join_bb->index);
1604 else
1605 fprintf (rtl_dump_file,
1606 "\nIF-THEN block found, start %d, then %d, join %d\n",
1607 test_bb->index, then_bb->index, join_bb->index);
/* Make sure the IF, THEN, and ELSE blocks are adjacent.  Actually, we
   get the first condition for free, since we've already asserted that
   there's a fallthru edge from IF to THEN.  */
1613 /* ??? As an enhancement, move the ELSE block. Have to deal with EH and
1614 BLOCK notes, if by no other means than aborting the merge if they
1615 exist. Sticky enough I don't want to think about it now. */
1616 next_index = then_bb->index;
1617 if (else_bb && ++next_index != else_bb->index)
1618 return FALSE;
1619 if (++next_index != join_bb->index)
1621 if (else_bb)
1622 join_bb = NULL;
1623 else
1624 return FALSE;
1627 /* Do the real work. */
1628 return process_if_block (test_bb, then_bb, else_bb, join_bb);
/* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
   transformable, but not necessarily the other.  There need be no
   JOIN block.

   Return TRUE if we were successful at converting the block.
1637 Cases we'd like to look at:
1640 if (test) goto over; // x not live
1641 x = a;
1642 goto label;
1643 over:
1645 becomes
1647 x = a;
1648 if (! test) goto label;
1651 if (test) goto E; // x not live
1652 x = big();
1653 goto L;
1655 x = b;
1656 goto M;
1658 becomes
1660 x = b;
1661 if (test) goto M;
1662 x = big();
1663 goto L;
1665 (3) // This one's really only interesting for targets that can do
1666 // multiway branching, e.g. IA-64 BBB bundles. For other targets
1667 // it results in multiple branches on a cache line, which often
1668 // does not sit well with predictors.
1670 if (test1) goto E; // predicted not taken
1671 x = a;
1672 if (test2) goto F;
1675 x = b;
1678 becomes
1680 x = a;
1681 if (test1) goto E;
1682 if (test2) goto F;
1684 Notes:
1686 (A) Don't do (2) if the branch is predicted against the block we're
1687 eliminating. Do it anyway if we can eliminate a branch; this requires
1688 that the sole successor of the eliminated block postdominate the other
1689 side of the if.
1691 (B) With CE, on (3) we can steal from both sides of the if, creating
1693 if (test1) x = a;
1694 if (!test1) x = b;
1695 if (test1) goto J;
1696 if (test2) goto F;
1700 Again, this is most useful if J postdominates.
1702 (C) CE substitutes for helpful life information.
1704 (D) These heuristics need a lot of work. */
1706 /* Tests for case 1 above. */
1708 static int
1709 find_if_case_1 (test_bb, then_edge, else_edge)
1710 basic_block test_bb;
1711 edge then_edge, else_edge;
1713 basic_block then_bb = then_edge->dest;
1714 basic_block else_bb = else_edge->dest;
1715 edge then_succ = then_bb->succ;
1716 rtx new_lab;
1718 /* THEN has one successor. */
1719 if (!then_succ || then_succ->succ_next != NULL)
1720 return FALSE;
1722 /* THEN does not fall through, but is not strange either. */
1723 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
1724 return FALSE;
1726 /* THEN has one predecessor. */
1727 if (then_bb->pred->pred_next != NULL)
1728 return FALSE;
1730 /* ELSE follows THEN. (??? could be moved) */
1731 if (else_bb->index != then_bb->index + 1)
1732 return FALSE;
1734 num_possible_if_blocks++;
1735 if (rtl_dump_file)
1736 fprintf (rtl_dump_file,
1737 "\nIF-CASE-1 found, start %d, then %d\n",
1738 test_bb->index, then_bb->index);
1740 /* THEN is small. */
1741 if (count_bb_insns (then_bb) > BRANCH_COST)
1742 return FALSE;
1744 /* Find the label for THEN's destination. */
1745 if (then_succ->dest == EXIT_BLOCK_PTR)
1746 new_lab = NULL_RTX;
1747 else
1749 new_lab = JUMP_LABEL (then_bb->end);
1750 if (! new_lab)
1751 abort ();
1754 /* Registers set are dead, or are predicable. */
1755 if (! dead_or_predicable (test_bb, then_bb, else_bb, new_lab, 1))
1756 return FALSE;
1758 /* Conversion went ok, including moving the insns and fixing up the
1759 jump. Adjust the CFG to match. */
1761 SET_UPDATE_LIFE (test_bb);
1762 bitmap_operation (test_bb->global_live_at_end,
1763 else_bb->global_live_at_start,
1764 then_bb->global_live_at_end, BITMAP_IOR);
1766 make_edge (NULL, test_bb, then_succ->dest, 0);
1767 flow_delete_block (then_bb);
1768 tidy_fallthru_edge (else_edge, test_bb, else_bb);
1770 num_removed_blocks++;
1771 num_updated_if_blocks++;
1773 return TRUE;
1776 /* Test for case 2 above. */
1778 static int
1779 find_if_case_2 (test_bb, then_edge, else_edge)
1780 basic_block test_bb;
1781 edge then_edge, else_edge;
1783 basic_block then_bb = then_edge->dest;
1784 basic_block else_bb = else_edge->dest;
1785 edge else_succ = else_bb->succ;
1786 rtx new_lab, note;
1788 /* ELSE has one successor. */
1789 if (!else_succ || else_succ->succ_next != NULL)
1790 return FALSE;
1792 /* ELSE outgoing edge is not complex. */
1793 if (else_succ->flags & EDGE_COMPLEX)
1794 return FALSE;
1796 /* ELSE has one predecessor. */
1797 if (else_bb->pred->pred_next != NULL)
1798 return FALSE;
1800 /* THEN is not EXIT. */
1801 if (then_bb->index < 0)
1802 return FALSE;
1804 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
1805 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
1806 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
1808 else if (else_succ->dest->index < 0
1809 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
1810 ORIG_INDEX (else_succ->dest)))
1812 else
1813 return FALSE;
1815 num_possible_if_blocks++;
1816 if (rtl_dump_file)
1817 fprintf (rtl_dump_file,
1818 "\nIF-CASE-2 found, start %d, else %d\n",
1819 test_bb->index, else_bb->index);
/* ELSE is small.  */
if (count_bb_insns (else_bb) > BRANCH_COST)
1823 return FALSE;
1825 /* Find the label for ELSE's destination. */
1826 if (else_succ->dest == EXIT_BLOCK_PTR)
1827 new_lab = NULL_RTX;
1828 else
1830 if (else_succ->flags & EDGE_FALLTHRU)
1832 new_lab = else_succ->dest->head;
1833 if (GET_CODE (new_lab) != CODE_LABEL)
1834 abort ();
1836 else
1838 new_lab = JUMP_LABEL (else_bb->end);
1839 if (! new_lab)
1840 abort ();
1844 /* Registers set are dead, or are predicable. */
1845 if (! dead_or_predicable (test_bb, else_bb, then_bb, new_lab, 0))
1846 return FALSE;
1848 /* Conversion went ok, including moving the insns and fixing up the
1849 jump. Adjust the CFG to match. */
1851 SET_UPDATE_LIFE (test_bb);
1852 bitmap_operation (test_bb->global_live_at_end,
1853 then_bb->global_live_at_start,
1854 else_bb->global_live_at_end, BITMAP_IOR);
1856 remove_edge (else_edge);
1857 make_edge (NULL, test_bb, else_succ->dest, 0);
1858 flow_delete_block (else_bb);
1860 num_removed_blocks++;
1861 num_updated_if_blocks++;
1863 /* ??? We may now fallthru from one of THEN's successors into a join
1864 block. Rerun cleanup_cfg? Examine things manually? Wait? */
1866 return TRUE;
1869 /* A subroutine of dead_or_predicable called through for_each_rtx.
1870 Return 1 if a memory is found. */
1872 static int
1873 find_memory (px, data)
1874 rtx *px;
1875 void *data ATTRIBUTE_UNUSED;
1877 return GET_CODE (*px) == MEM;
1880 /* Used by the code above to perform the actual rtl transformations.
1881 Return TRUE if successful.
1883 TEST_BB is the block containing the conditional branch. MERGE_BB
1884 is the block containing the code to manipulate. NEW_DEST is the
1885 label TEST_BB should be branching to after the conversion.
1886 REVERSEP is true if the sense of the branch should be reversed. */
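/* Two strategies are used below.  With conditional execution available
   the MERGE_BB insns are simply predicated on the branch condition.
   Otherwise MERGE_BB must contain no calls, no trapping operations and
   no memory references, and may only set registers that are neither
   live on the other path nor used or set while computing the
   condition; its insns are then moved up in front of the branch.
   In both cases the branch is redirected to NEW_DEST afterward.  */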
1888 static int
1889 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
1890 basic_block test_bb, merge_bb, other_bb;
1891 rtx new_dest;
1892 int reversep;
1894 rtx head, end, jump, earliest, old_dest;
1896 /* No code movement can occur if we'd be scrogging EH regions.
1897 Within MERGE_BB, ensure that we've not got stray EH_BEG or EH_END
1898 notes within the block. Between the blocks, checking that the end
1899 region numbers match ensures that we won't disrupt the nesting
1900 between regions. */
1901 if (merge_bb->eh_beg != merge_bb->eh_end
1902 || merge_bb->eh_end != test_bb->eh_end)
1903 return FALSE;
1905 jump = test_bb->end;
1907 /* Find the extent of the real code in the merge block. */
1908 head = merge_bb->head;
1909 end = merge_bb->end;
1911 if (GET_CODE (head) == CODE_LABEL)
1912 head = NEXT_INSN (head);
1913 if (GET_CODE (head) == NOTE)
1915 if (head == end)
1917 head = end = NULL_RTX;
1918 goto no_body;
1920 head = NEXT_INSN (head);
1923 if (GET_CODE (end) == JUMP_INSN)
1925 if (head == end)
1927 head = end = NULL_RTX;
1928 goto no_body;
1930 end = PREV_INSN (end);
1933 /* Disable handling dead code by conditional execution if the machine needs
1934 to do anything funny with the tests, etc. */
1935 #ifndef IFCVT_MODIFY_TESTS
1936 if (HAVE_conditional_execution)
/* In the conditional execution case, we have things easy.  We know
   the condition is reversible.  We don't have to check life info
   because we're going to conditionally execute the code anyway.
   All that's left is making sure the insns involved can actually
   be predicated.  */
1944 rtx cond, prob_val;
1946 cond = cond_exec_get_condition (jump);
1948 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
1949 if (prob_val)
1950 prob_val = XEXP (prob_val, 0);
1952 if (reversep)
1954 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1955 GET_MODE (cond), XEXP (cond, 0),
1956 XEXP (cond, 1));
1957 if (prob_val)
1958 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
1961 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
1962 goto cancel;
1964 earliest = jump;
1966 else
1967 #endif
1969 /* In the non-conditional execution case, we have to verify that there
1970 are no trapping operations, no calls, no references to memory, and
1971 that any registers modified are dead at the branch site. */
1973 rtx insn, cond, prev;
1974 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
1975 regset merge_set, tmp, test_live, test_set;
1976 struct propagate_block_info *pbi;
1977 int i, fail = 0;
1979 /* Check for no calls or trapping operations. */
1980 for (insn = head; ; insn = NEXT_INSN (insn))
1982 if (GET_CODE (insn) == CALL_INSN)
1983 return FALSE;
1984 if (INSN_P (insn))
1986 if (may_trap_p (PATTERN (insn)))
1987 return FALSE;
/* ??? Even non-trapping memories such as stack frame
   references must be avoided.  For stores, we collect
   no lifetime info; for reads, we'd have to assert
   true_dependence false against every store in the
   TEST range.  */
1994 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
1995 return FALSE;
1997 if (insn == end)
1998 break;
2001 if (! any_condjump_p (jump))
2002 return FALSE;
2004 /* Find the extent of the conditional. */
2005 cond = noce_get_condition (jump, &earliest);
2006 if (! cond)
2007 return FALSE;
2009 /* Collect:
2010 MERGE_SET = set of registers set in MERGE_BB
2011 TEST_LIVE = set of registers live at EARLIEST
2012 TEST_SET = set of registers set between EARLIEST and the
2013 end of the block. */
2015 tmp = INITIALIZE_REG_SET (tmp_head);
2016 merge_set = INITIALIZE_REG_SET (merge_set_head);
2017 test_live = INITIALIZE_REG_SET (test_live_head);
2018 test_set = INITIALIZE_REG_SET (test_set_head);
2020 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2021 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2022 since we've already asserted that MERGE_BB is small. */
2023 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2025 /* For small register class machines, don't lengthen lifetimes of
2026 hard registers before reload. */
2027 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2029 EXECUTE_IF_SET_IN_BITMAP
2030 (merge_set, 0, i,
2032 if (i < FIRST_PSEUDO_REGISTER
2033 && ! fixed_regs[i]
2034 && ! global_regs[i])
2035 fail = 1;
2039 /* For TEST, we're interested in a range of insns, not a whole block.
2040 Moreover, we're interested in the insns live from OTHER_BB. */
2042 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2043 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2046 for (insn = jump; ; insn = prev)
2048 prev = propagate_one_insn (pbi, insn);
2049 if (insn == earliest)
2050 break;
2053 free_propagate_block_info (pbi);
/* We can perform the transformation if
     MERGE_SET & (TEST_SET | TEST_LIVE)
   and
     TEST_SET & merge_bb->global_live_at_start
   are empty.  */
2061 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2062 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2063 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2065 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2066 BITMAP_AND);
2067 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2069 FREE_REG_SET (tmp);
2070 FREE_REG_SET (merge_set);
2071 FREE_REG_SET (test_live);
2072 FREE_REG_SET (test_set);
2074 if (fail)
2075 return FALSE;
2078 no_body:
/* We don't want to use normal invert_jump or redirect_jump because
   we don't want delete_insn called.  Also, we want to do our own
   change group management.  */
2083 old_dest = JUMP_LABEL (jump);
2084 if (reversep
2085 ? ! invert_jump_1 (jump, new_dest)
2086 : ! redirect_jump_1 (jump, new_dest))
2087 goto cancel;
2089 if (! apply_change_group ())
2090 return FALSE;
2092 if (old_dest)
2093 LABEL_NUSES (old_dest) -= 1;
2094 if (new_dest)
2095 LABEL_NUSES (new_dest) += 1;
2096 JUMP_LABEL (jump) = new_dest;
2098 if (reversep)
2100 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2101 if (note)
2102 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
2105 /* Move the insns out of MERGE_BB to before the branch. */
2106 if (head != NULL)
2108 if (end == merge_bb->end)
2109 merge_bb->end = PREV_INSN (head);
2111 head = squeeze_notes (head, end);
2112 if (GET_CODE (end) == NOTE
2113 && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
2114 || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
2115 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
2116 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
2117 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
2118 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
2120 if (head == end)
2121 return TRUE;
2122 end = PREV_INSN (end);
2125 reorder_insns (head, end, PREV_INSN (earliest));
2127 return TRUE;
2129 cancel:
2130 cancel_changes (0);
2131 return FALSE;
2134 /* Main entry point for all if-conversion. */
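/* LIFE_DATA_OK is nonzero when the caller's global life information is
   valid; it gates the post-dominator computation on targets without
   conditional execution and the rebuild of life info done at the end
   for the blocks we changed.  */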
2136 void
2137 if_convert (life_data_ok)
2138 int life_data_ok;
2140 int block_num;
2142 num_possible_if_blocks = 0;
2143 num_updated_if_blocks = 0;
2144 num_removed_blocks = 0;
2146 /* Free up basic_block_for_insn so that we don't have to keep it
2147 up to date, either here or in merge_blocks_nomove. */
2148 free_basic_block_vars (1);
2150 /* Compute postdominators if we think we'll use them. */
2151 post_dominators = NULL;
2152 if (HAVE_conditional_execution || life_data_ok)
2154 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2155 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2158 /* Record initial block numbers. */
2159 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2160 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2162 /* Go through each of the basic blocks looking for things to convert. */
2163 for (block_num = 0; block_num < n_basic_blocks; )
2165 basic_block bb = BASIC_BLOCK (block_num);
2166 if (find_if_header (bb))
2167 block_num = bb->index;
2168 else
2169 block_num++;
2172 if (post_dominators)
2173 sbitmap_vector_free (post_dominators);
2175 if (rtl_dump_file)
2176 fflush (rtl_dump_file);
2178 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2179 compute_bb_for_insn (get_max_uid ());
2181 /* Rebuild life info for basic blocks that require it. */
2182 if (num_removed_blocks && life_data_ok)
2184 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2185 sbitmap_zero (update_life_blocks);
2187 /* If we allocated new pseudos, we must resize the array for sched1. */
2188 if (max_regno < max_reg_num ())
2190 max_regno = max_reg_num ();
2191 allocate_reg_info (max_regno, FALSE, FALSE);
2194 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2195 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2196 SET_BIT (update_life_blocks, block_num);
2198 count_or_remove_death_notes (update_life_blocks, 1);
2199 /* ??? See about adding a mode that verifies that the initial
2200 set of blocks don't let registers come live. */
2201 update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
2202 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2203 | PROP_KILL_DEAD_CODE);
2205 sbitmap_free (update_life_blocks);
2208 /* Write the final stats. */
2209 if (rtl_dump_file && num_possible_if_blocks > 0)
2211 fprintf (rtl_dump_file,
2212 "\n%d possible IF blocks searched.\n",
2213 num_possible_if_blocks);
2214 fprintf (rtl_dump_file,
2215 "%d IF blocks converted.\n",
2216 num_updated_if_blocks);
2217 fprintf (rtl_dump_file,
2218 "%d basic blocks deleted.\n\n\n",
2219 num_removed_blocks);
2222 #ifdef ENABLE_CHECKING
2223 verify_flow_info ();
2224 #endif